From 74c85026b97deb028125089bbe56baaf764cea22 Mon Sep 17 00:00:00 2001 From: ramky1982 Date: Thu, 10 Sep 2015 11:57:44 +0530 Subject: [PATCH 01/57] Update query_dsl.asciidoc --- docs/reference/migration/migrate_2_0/query_dsl.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/migration/migrate_2_0/query_dsl.asciidoc b/docs/reference/migration/migrate_2_0/query_dsl.asciidoc index cb9e1581a91..1d06a14069c 100644 --- a/docs/reference/migration/migrate_2_0/query_dsl.asciidoc +++ b/docs/reference/migration/migrate_2_0/query_dsl.asciidoc @@ -25,7 +25,7 @@ be cacheable. Filter context is introduced by: aggregations or index aliases -- -As a result of this change, he `execution` option of the `terms` filter is now +As a result of this change, the `execution` option of the `terms` filter is now deprecated and ignored if provided. ==== `or` and `and` now implemented via `bool` From 30fe74b5df2cf9f94fc62eb6a90cf15e74f897b2 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Sat, 28 Nov 2015 16:54:51 -0800 Subject: [PATCH 02/57] Build: Setup standalone tests to compile in intellij This adds the standalone tests so they will compile (and thus can be modified with import completion) within IntelliJ. It also explicitly sets up buildSrc as a module. Note that this does *not* mean eg evil-tests can be run from intellij. These are special tests that require special settings (eg disabling security manager). They need to be run from the command line. 
closes #15075 --- build.gradle | 6 ++++++ .../gradle/test/StandaloneTestBasePlugin.groovy | 9 ++++++--- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/build.gradle b/build.gradle index 785db7ec0c4..20976eff2bd 100644 --- a/build.gradle +++ b/build.gradle @@ -177,6 +177,12 @@ tasks.idea.doLast { if (System.getProperty('idea.active') != null && ideaMarker.exists() == false) { throw new GradleException('You must run gradle idea from the root of elasticsearch before importing into IntelliJ') } +// add buildSrc itself as a groovy project +task buildSrcIdea(type: GradleBuild) { + buildFile = 'buildSrc/build.gradle' + tasks = ['cleanIdea', 'ideaModule'] +} +tasks.idea.dependsOn(buildSrcIdea) // eclipse configuration diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestBasePlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestBasePlugin.groovy index 271bc5e58be..c6edc87b9a7 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestBasePlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestBasePlugin.groovy @@ -39,9 +39,6 @@ class StandaloneTestBasePlugin implements Plugin { BuildPlugin.globalBuildInfo(project) BuildPlugin.configureRepositories(project) - // remove some unnecessary tasks for a qa test - project.tasks.removeAll { it.name in ['assemble', 'buildDependents'] } - // only setup tests to build project.sourceSets { test @@ -56,6 +53,12 @@ class StandaloneTestBasePlugin implements Plugin { plusConfigurations = [project.configurations.testRuntime] } } + project.idea { + module { + testSourceDirs += project.sourceSets.test.java.srcDirs + scopes['TEST'] = [plus: [project.configurations.testRuntime]] + } + } PrecommitTasks.configure(project) } } From 536e7e126833e91f7aa5168528711685d6a9770d Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 4 Dec 2015 09:43:32 +0100 Subject: [PATCH 03/57] Remove ancient deprecated and alternative 
recovery settings Several settings have been deprecated or are replaced with new settings after refactorings in version 1.x. This commit removes the support for these settings. The settings are: * `index.shard.recovery.translog_size` * `index.shard.recovery.translog_ops` * `index.shard.recovery.file_chunk_size` * `index.shard.recovery.concurrent_streams` * `index.shard.recovery.concurrent_small_file_streams` * `indices.recovery.max_size_per_sec` --- .../elasticsearch/cluster/ClusterModule.java | 1 - .../cluster/metadata/MetaData.java | 3 +-- .../indices/recovery/RecoverySettings.java | 20 +++++++------------ 3 files changed, 8 insertions(+), 16 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java index f2797e3b267..b2e793ba0ab 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -176,7 +176,6 @@ public class ClusterModule extends AbstractModule { registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT, Validator.TIME_NON_NEGATIVE); registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT, Validator.TIME_NON_NEGATIVE); registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT, Validator.TIME_NON_NEGATIVE); - registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_MAX_SIZE_PER_SEC, Validator.BYTES_SIZE); registerClusterDynamicSetting(ThreadPool.THREADPOOL_GROUP + "*", ThreadPool.THREAD_POOL_TYPE_SETTINGS_VALIDATOR); registerClusterDynamicSetting(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, Validator.INTEGER); registerClusterDynamicSetting(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, Validator.INTEGER); diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java 
b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index d4e7baac790..fb54426b5a2 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -749,8 +749,7 @@ public class MetaData implements Iterable, Diffable, Fr IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC, RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE, RecoverySettings.INDICES_RECOVERY_TRANSLOG_SIZE, - RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC, - RecoverySettings.INDICES_RECOVERY_MAX_SIZE_PER_SEC)); + RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC)); /** All known time cluster settings. */ diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java index 7ccba843993..749ba4f3360 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java @@ -75,12 +75,6 @@ public class RecoverySettings extends AbstractComponent implements Closeable { public static final long SMALL_FILE_CUTOFF_BYTES = ByteSizeValue.parseBytesSizeValue("5mb", "SMALL_FILE_CUTOFF_BYTES").bytes(); - /** - * Use {@link #INDICES_RECOVERY_MAX_BYTES_PER_SEC} instead - */ - @Deprecated - public static final String INDICES_RECOVERY_MAX_SIZE_PER_SEC = "indices.recovery.max_size_per_sec"; - private volatile ByteSizeValue fileChunkSize; private volatile boolean compress; @@ -105,9 +99,9 @@ public class RecoverySettings extends AbstractComponent implements Closeable { public RecoverySettings(Settings settings, NodeSettingsService nodeSettingsService) { super(settings); - this.fileChunkSize = settings.getAsBytesSize(INDICES_RECOVERY_FILE_CHUNK_SIZE, settings.getAsBytesSize("index.shard.recovery.file_chunk_size", new ByteSizeValue(512, ByteSizeUnit.KB))); - this.translogOps = 
settings.getAsInt(INDICES_RECOVERY_TRANSLOG_OPS, settings.getAsInt("index.shard.recovery.translog_ops", 1000)); - this.translogSize = settings.getAsBytesSize(INDICES_RECOVERY_TRANSLOG_SIZE, settings.getAsBytesSize("index.shard.recovery.translog_size", new ByteSizeValue(512, ByteSizeUnit.KB))); + this.fileChunkSize = settings.getAsBytesSize(INDICES_RECOVERY_FILE_CHUNK_SIZE, new ByteSizeValue(512, ByteSizeUnit.KB)); + this.translogOps = settings.getAsInt(INDICES_RECOVERY_TRANSLOG_OPS, 1000); + this.translogSize = settings.getAsBytesSize(INDICES_RECOVERY_TRANSLOG_SIZE, new ByteSizeValue(512, ByteSizeUnit.KB)); this.compress = settings.getAsBoolean(INDICES_RECOVERY_COMPRESS, true); this.retryDelayStateSync = settings.getAsTime(INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC, TimeValue.timeValueMillis(500)); @@ -124,14 +118,14 @@ public class RecoverySettings extends AbstractComponent implements Closeable { ); - this.concurrentStreams = settings.getAsInt("indices.recovery.concurrent_streams", settings.getAsInt("index.shard.recovery.concurrent_streams", 3)); + this.concurrentStreams = settings.getAsInt(INDICES_RECOVERY_CONCURRENT_STREAMS, 3); this.concurrentStreamPool = EsExecutors.newScaling("recovery_stream", 0, concurrentStreams, 60, TimeUnit.SECONDS, EsExecutors.daemonThreadFactory(settings, "[recovery_stream]")); - this.concurrentSmallFileStreams = settings.getAsInt("indices.recovery.concurrent_small_file_streams", settings.getAsInt("index.shard.recovery.concurrent_small_file_streams", 2)); + this.concurrentSmallFileStreams = settings.getAsInt(INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, 2); this.concurrentSmallFileStreamPool = EsExecutors.newScaling("small_file_recovery_stream", 0, concurrentSmallFileStreams, 60, TimeUnit.SECONDS, EsExecutors.daemonThreadFactory(settings, "[small_file_recovery_stream]")); - this.maxBytesPerSec = settings.getAsBytesSize("indices.recovery.max_bytes_per_sec", settings.getAsBytesSize("indices.recovery.max_size_per_sec", new 
ByteSizeValue(40, ByteSizeUnit.MB))); + this.maxBytesPerSec = settings.getAsBytesSize(INDICES_RECOVERY_MAX_BYTES_PER_SEC, new ByteSizeValue(40, ByteSizeUnit.MB)); if (maxBytesPerSec.bytes() <= 0) { rateLimiter = null; } else { @@ -206,7 +200,7 @@ public class RecoverySettings extends AbstractComponent implements Closeable { class ApplySettings implements NodeSettingsService.Listener { @Override public void onRefreshSettings(Settings settings) { - ByteSizeValue maxSizePerSec = settings.getAsBytesSize(INDICES_RECOVERY_MAX_BYTES_PER_SEC, settings.getAsBytesSize(INDICES_RECOVERY_MAX_SIZE_PER_SEC, RecoverySettings.this.maxBytesPerSec)); + ByteSizeValue maxSizePerSec = settings.getAsBytesSize(INDICES_RECOVERY_MAX_BYTES_PER_SEC, RecoverySettings.this.maxBytesPerSec); if (!Objects.equals(maxSizePerSec, RecoverySettings.this.maxBytesPerSec)) { logger.info("updating [{}] from [{}] to [{}]", INDICES_RECOVERY_MAX_BYTES_PER_SEC, RecoverySettings.this.maxBytesPerSec, maxSizePerSec); RecoverySettings.this.maxBytesPerSec = maxSizePerSec; From be033f13623aab1d386264787d1c1736d1eb7d67 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 4 Dec 2015 17:15:49 +0100 Subject: [PATCH 04/57] Make MappedFieldType.checkTypeName part of MappedFieldType.checkCompatibility. 
--- .../index/mapper/FieldMapper.java | 9 --------- .../index/mapper/FieldTypeLookup.java | 18 ++++++------------ .../index/mapper/MappedFieldType.java | 6 ++++-- .../index/mapper/FieldTypeTestCase.java | 15 +++++++++------ .../mapper/update/UpdateMappingTests.java | 8 ++++---- 5 files changed, 23 insertions(+), 33 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index c277cdc4728..ced3f08b229 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -370,15 +370,6 @@ public abstract class FieldMapper extends Mapper { return; } FieldMapper fieldMergeWith = (FieldMapper) mergeWith; - List subConflicts = new ArrayList<>(); // TODO: just expose list from MergeResult? - fieldType().checkTypeName(fieldMergeWith.fieldType(), subConflicts); - if (subConflicts.isEmpty() == false) { - // return early if field types don't match - assert subConflicts.size() == 1; - mergeResult.addConflict(subConflicts.get(0)); - return; - } - multiFields.merge(mergeWith, mergeResult); if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java index eaaa47c3bd2..da21e599cc9 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java @@ -154,12 +154,9 @@ class FieldTypeLookup implements Iterable { MappedFieldTypeReference ref = fullNameToFieldType.get(fieldMapper.fieldType().names().fullName()); if (ref != null) { List conflicts = new ArrayList<>(); - ref.get().checkTypeName(fieldMapper.fieldType(), conflicts); - if (conflicts.isEmpty()) { // only check compat if they are the same type - final Set types = 
fullNameToTypes.get(fieldMapper.fieldType().names().fullName()); - boolean strict = beStrict(type, types, updateAllTypes); - ref.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict); - } + final Set types = fullNameToTypes.get(fieldMapper.fieldType().names().fullName()); + boolean strict = beStrict(type, types, updateAllTypes); + ref.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict); if (conflicts.isEmpty() == false) { throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with existing mapping in other types:\n" + conflicts.toString()); } @@ -169,12 +166,9 @@ class FieldTypeLookup implements Iterable { MappedFieldTypeReference indexNameRef = indexNameToFieldType.get(fieldMapper.fieldType().names().indexName()); if (indexNameRef != null) { List conflicts = new ArrayList<>(); - indexNameRef.get().checkTypeName(fieldMapper.fieldType(), conflicts); - if (conflicts.isEmpty()) { // only check compat if they are the same type - final Set types = indexNameToTypes.get(fieldMapper.fieldType().names().indexName()); - boolean strict = beStrict(type, types, updateAllTypes); - indexNameRef.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict); - } + final Set types = indexNameToTypes.get(fieldMapper.fieldType().names().indexName()); + boolean strict = beStrict(type, types, updateAllTypes); + indexNameRef.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict); if (conflicts.isEmpty() == false) { throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with mapping with the same index name in other types" + conflicts.toString()); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 2755ca1a4e8..32e749992e6 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ 
b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -229,9 +229,9 @@ public abstract class MappedFieldType extends FieldType { public abstract String typeName(); /** Checks this type is the same type as other. Adds a conflict if they are different. */ - public final void checkTypeName(MappedFieldType other, List conflicts) { + private final void checkTypeName(MappedFieldType other) { if (typeName().equals(other.typeName()) == false) { - conflicts.add("mapper [" + names().fullName() + "] cannot be changed from type [" + typeName() + "] to [" + other.typeName() + "]"); + throw new IllegalArgumentException("mapper [" + names().fullName() + "] cannot be changed from type [" + typeName() + "] to [" + other.typeName() + "]"); } else if (getClass() != other.getClass()) { throw new IllegalStateException("Type names equal for class " + getClass().getSimpleName() + " and " + other.getClass().getSimpleName()); } @@ -243,6 +243,8 @@ public abstract class MappedFieldType extends FieldType { * Otherwise, only properties which must never change in an index are checked. 
*/ public void checkCompatibility(MappedFieldType other, List conflicts, boolean strict) { + checkTypeName(other); + boolean indexed = indexOptions() != IndexOptions.NONE; boolean mergeWithIndexed = other.indexOptions() != IndexOptions.NONE; // TODO: should be validating if index options go "up" (but "down" is ok) diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java index a45348d530c..ca0cbf194d6 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java @@ -281,7 +281,7 @@ public abstract class FieldTypeTestCase extends ESTestCase { public void testCheckTypeName() { final MappedFieldType fieldType = createNamedDefaultFieldType(); List conflicts = new ArrayList<>(); - fieldType.checkTypeName(fieldType, conflicts); + fieldType.checkCompatibility(fieldType, conflicts, random().nextBoolean()); // no exception assertTrue(conflicts.toString(), conflicts.isEmpty()); MappedFieldType bogus = new MappedFieldType() { @@ -291,7 +291,7 @@ public abstract class FieldTypeTestCase extends ESTestCase { public String typeName() { return fieldType.typeName();} }; try { - fieldType.checkTypeName(bogus, conflicts); + fieldType.checkCompatibility(bogus, conflicts, random().nextBoolean()); fail("expected bad types exception"); } catch (IllegalStateException e) { assertTrue(e.getMessage().contains("Type names equal")); @@ -304,10 +304,13 @@ public abstract class FieldTypeTestCase extends ESTestCase { @Override public String typeName() { return "othertype";} }; - fieldType.checkTypeName(other, conflicts); - assertFalse(conflicts.isEmpty()); - assertTrue(conflicts.get(0).contains("cannot be changed from type")); - assertEquals(1, conflicts.size()); + try { + fieldType.checkCompatibility(other, conflicts, random().nextBoolean()); + fail(); + } catch (IllegalArgumentException e) { 
+ assertTrue(e.getMessage(), e.getMessage().contains("cannot be changed from type")); + } + assertTrue(conflicts.toString(), conflicts.isEmpty()); } public void testCheckCompatibility() { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java index f0a8f5d079d..abf5f4819cd 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java @@ -151,7 +151,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { fail(); } catch (IllegalArgumentException e) { // expected - assertTrue(e.getMessage().contains("conflicts with existing mapping in other types")); + assertTrue(e.getMessage(), e.getMessage().contains("mapper [foo] cannot be changed from type [long] to [double]")); } try { @@ -159,7 +159,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { fail(); } catch (IllegalArgumentException e) { // expected - assertTrue(e.getMessage().contains("conflicts with existing mapping in other types")); + assertTrue(e.getMessage(), e.getMessage().contains("mapper [foo] cannot be changed from type [long] to [double]")); } assertTrue(mapperService.documentMapper("type1").mapping().root().getMapper("foo") instanceof LongFieldMapper); @@ -186,7 +186,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { fail(); } catch (IllegalArgumentException e) { // expected - assertTrue(e.getMessage().contains("conflicts with existing mapping in other types")); + assertTrue(e.getMessage(), e.getMessage().contains("mapper [foo] cannot be changed from type [long] to [double]")); } try { @@ -194,7 +194,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { fail(); } catch (IllegalArgumentException e) { // expected - assertTrue(e.getMessage().contains("conflicts with existing mapping in other types")); + 
assertTrue(e.getMessage(), e.getMessage().contains("mapper [foo] cannot be changed from type [long] to [double]")); } assertTrue(mapperService.documentMapper("type1").mapping().root().getMapper("foo") instanceof LongFieldMapper); From c1f7f8c03ce115f32972c8c6e1a6fb6044319017 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 7 Dec 2015 09:54:23 +0100 Subject: [PATCH 05/57] add settings to migration guide --- docs/reference/migration/migrate_3_0.asciidoc | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index 822d8864f71..ba3159afc9f 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -180,6 +180,22 @@ Previously, there were three settings for the ping timeout: `discovery.zen.initi the only setting key for the ping timeout is now `discovery.zen.ping_timeout`. The default value for ping timeouts remains at three seconds. + +==== Recovery settings + +Recovery settings deprecated in 1.x have been removed: + + * `index.shard.recovery.translog_size` is superseded by `indices.recovery.translog_size` + * `index.shard.recovery.translog_ops` is superseded by `indices.recovery.translog_ops` + * `index.shard.recovery.file_chunk_size` is superseded by `indices.recovery.file_chunk_size` + * `index.shard.recovery.concurrent_streams` is superseded by `indices.recovery.concurrent_streams` + * `index.shard.recovery.concurrent_small_file_streams` is superseded by `indices.recovery.concurrent_small_file_streams` + * `indices.recovery.max_size_per_sec` is superseded by `indices.recovery.max_bytes_per_sec` + +If you are using any of these settings please take the time and review their purpose. All of the settings above are considered +_expert settings_ and should only be used if absolutely necessary. 
If you have set any of the above setting as persistent +cluster settings please use the settings update API and set their superseded keys accordingly. + [[breaking_30_mapping_changes]] === Mapping changes From aa69c4a20b106079e9f67dfd1ecaa5a8e05f8ba7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 26 Nov 2015 19:45:33 +0100 Subject: [PATCH 06/57] Add fromXContent method to HighlightBuilder For the search refactoring the HighlightBuilder needs a way to create new instances by parsing xContent. For bwc this PR start by moving over and slightly modifying the parsing from HighlighterParseElement and keeps parsing for top level highlighter and field options separate. Also adding tests for roundtrip of random builder (rendering it to xContent and parsing it and making sure the original builder properties are preserved) --- .../highlight/AbstractHighlighterBuilder.java | 4 +- .../search/highlight/HighlightBuilder.java | 180 +++++++++++++++++- .../highlight/HighlighterParseElement.java | 4 +- .../highlight/HighlightBuilderTests.java | 115 ++++++++++- 4 files changed, 289 insertions(+), 14 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java b/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java index b10e2e8f58f..79927ee7447 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java @@ -377,7 +377,7 @@ public abstract class AbstractHighlighterBuilder 0) { builder.field("options", options); @@ -506,4 +506,4 @@ public abstract class AbstractHighlighterBuilder preTagsList = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + preTagsList.add(parser.text()); + } + highlightBuilder.preTags(preTagsList.toArray(new String[preTagsList.size()])); + } else if 
("post_tags".equals(topLevelFieldName) || "postTags".equals(topLevelFieldName)) { + List postTagsList = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + postTagsList.add(parser.text()); + } + highlightBuilder.postTags(postTagsList.toArray(new String[postTagsList.size()])); + } else if ("fields".equals(topLevelFieldName)) { + highlightBuilder.useExplicitFieldOrder(true); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + if (token == XContentParser.Token.START_OBJECT) { + String highlightFieldName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + if (highlightFieldName != null) { + throw new IllegalArgumentException("If highlighter fields is an array it must contain objects containing a single field"); + } + highlightFieldName = parser.currentName(); + } else if (token == XContentParser.Token.START_OBJECT) { + highlightBuilder.field(Field.fromXContent(highlightFieldName, parseContext)); + } + } + } else { + throw new IllegalArgumentException("If highlighter fields is an array it must contain objects containing a single field"); + } + } + } + } else if (token.isValue()) { + if ("order".equals(topLevelFieldName)) { + highlightBuilder.order(parser.text()); + } else if ("tags_schema".equals(topLevelFieldName) || "tagsSchema".equals(topLevelFieldName)) { + highlightBuilder.tagsSchema(parser.text()); + } else if ("highlight_filter".equals(topLevelFieldName) || "highlightFilter".equals(topLevelFieldName)) { + highlightBuilder.highlightFilter(parser.booleanValue()); + } else if ("fragment_size".equals(topLevelFieldName) || "fragmentSize".equals(topLevelFieldName)) { + highlightBuilder.fragmentSize(parser.intValue()); + } else if ("number_of_fragments".equals(topLevelFieldName) || "numberOfFragments".equals(topLevelFieldName)) { + highlightBuilder.numOfFragments(parser.intValue()); + } else if 
("encoder".equals(topLevelFieldName)) { + highlightBuilder.encoder(parser.text()); + } else if ("require_field_match".equals(topLevelFieldName) || "requireFieldMatch".equals(topLevelFieldName)) { + highlightBuilder.requireFieldMatch(parser.booleanValue()); + } else if ("boundary_max_scan".equals(topLevelFieldName) || "boundaryMaxScan".equals(topLevelFieldName)) { + highlightBuilder.boundaryMaxScan(parser.intValue()); + } else if ("boundary_chars".equals(topLevelFieldName) || "boundaryChars".equals(topLevelFieldName)) { + highlightBuilder.boundaryChars(parser.text().toCharArray()); + } else if ("type".equals(topLevelFieldName)) { + highlightBuilder.highlighterType(parser.text()); + } else if ("fragmenter".equals(topLevelFieldName)) { + highlightBuilder.fragmenter(parser.text()); + } else if ("no_match_size".equals(topLevelFieldName) || "noMatchSize".equals(topLevelFieldName)) { + highlightBuilder.noMatchSize(parser.intValue()); + } else if ("force_source".equals(topLevelFieldName) || "forceSource".equals(topLevelFieldName)) { + highlightBuilder.forceSource(parser.booleanValue()); + } else if ("phrase_limit".equals(topLevelFieldName) || "phraseLimit".equals(topLevelFieldName)) { + highlightBuilder.phraseLimit(parser.intValue()); + } + } else if (token == XContentParser.Token.START_OBJECT && "options".equals(topLevelFieldName)) { + highlightBuilder.options(parser.map()); + } else if (token == XContentParser.Token.START_OBJECT) { + if ("fields".equals(topLevelFieldName)) { + String highlightFieldName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + highlightFieldName = parser.currentName(); + } else if (token == XContentParser.Token.START_OBJECT) { + highlightBuilder.field(Field.fromXContent(highlightFieldName, parseContext)); + } + } + } else if ("highlight_query".equals(topLevelFieldName) || "highlightQuery".equals(topLevelFieldName)) { + 
highlightBuilder.highlightQuery(parseContext.parseInnerQueryBuilder()); + } + } + } + + if (highlightBuilder.preTags() != null && highlightBuilder.postTags() == null) { + throw new IllegalArgumentException("Highlighter global preTags are set, but global postTags are not set"); + } + return highlightBuilder; + } + + + public void innerXContent(XContentBuilder builder) throws IOException { // first write common options commonOptionsToXContent(builder); @@ -205,7 +315,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder preTagsList = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + preTagsList.add(parser.text()); + } + field.preTags(preTagsList.toArray(new String[preTagsList.size()])); + } else if ("post_tags".equals(currentFieldName) || "postTags".equals(currentFieldName)) { + List postTagsList = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + postTagsList.add(parser.text()); + } + field.postTags(postTagsList.toArray(new String[postTagsList.size()])); + } else if ("matched_fields".equals(currentFieldName) || "matchedFields".equals(currentFieldName)) { + List matchedFields = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + matchedFields.add(parser.text()); + } + field.matchedFields(matchedFields.toArray(new String[matchedFields.size()])); + } + } else if (token.isValue()) { + if ("fragment_size".equals(currentFieldName) || "fragmentSize".equals(currentFieldName)) { + field.fragmentSize(parser.intValue()); + } else if ("number_of_fragments".equals(currentFieldName) || "numberOfFragments".equals(currentFieldName)) { + field.numOfFragments(parser.intValue()); + } else if ("fragment_offset".equals(currentFieldName) || "fragmentOffset".equals(currentFieldName)) { + field.fragmentOffset(parser.intValue()); + } else if ("highlight_filter".equals(currentFieldName) || "highlightFilter".equals(currentFieldName)) { + 
field.highlightFilter(parser.booleanValue()); + } else if ("order".equals(currentFieldName)) { + field.order(parser.text()); + } else if ("require_field_match".equals(currentFieldName) || "requireFieldMatch".equals(currentFieldName)) { + field.requireFieldMatch(parser.booleanValue()); + } else if ("boundary_max_scan".equals(currentFieldName) || "boundaryMaxScan".equals(currentFieldName)) { + field.boundaryMaxScan(parser.intValue()); + } else if ("boundary_chars".equals(currentFieldName) || "boundaryChars".equals(currentFieldName)) { + field.boundaryChars(parser.text().toCharArray()); + } else if ("type".equals(currentFieldName)) { + field.highlighterType(parser.text()); + } else if ("fragmenter".equals(currentFieldName)) { + field.fragmenter(parser.text()); + } else if ("no_match_size".equals(currentFieldName) || "noMatchSize".equals(currentFieldName)) { + field.noMatchSize(parser.intValue()); + } else if ("force_source".equals(currentFieldName) || "forceSource".equals(currentFieldName)) { + field.forceSource(parser.booleanValue()); + } else if ("phrase_limit".equals(currentFieldName) || "phraseLimit".equals(currentFieldName)) { + field.phraseLimit(parser.intValue()); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if ("highlight_query".equals(currentFieldName) || "highlightQuery".equals(currentFieldName)) { + field.highlightQuery(parseContext.parseInnerQueryBuilder()); + } else if ("options".equals(currentFieldName)) { + field.options(parser.map()); + } + } + } + return field; + } + @Override protected int doHashCode() { return Objects.hash(name, fragmentOffset, Arrays.hashCode(matchedFields)); diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java b/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java index fdf9e2c26dd..4617fa57e66 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java +++ 
b/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java @@ -94,7 +94,7 @@ public class HighlighterParseElement implements SearchParseElement { } } - public SearchContextHighlight parse(XContentParser parser, QueryShardContext queryShardContext) throws IOException { + public static SearchContextHighlight parse(XContentParser parser, QueryShardContext queryShardContext) throws IOException { XContentParser.Token token; String topLevelFieldName = null; final List> fieldsOptions = new ArrayList<>(); @@ -211,7 +211,7 @@ public class HighlighterParseElement implements SearchParseElement { return new SearchContextHighlight(fields); } - protected SearchContextHighlight.FieldOptions.Builder parseFields(XContentParser parser, QueryShardContext queryShardContext) throws IOException { + private static SearchContextHighlight.FieldOptions.Builder parseFields(XContentParser parser, QueryShardContext queryShardContext) throws IOException { XContentParser.Token token; final SearchContextHighlight.FieldOptions.Builder fieldOptionsBuilder = new SearchContextHighlight.FieldOptions.Builder(); diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java index cefc232fddb..70e6574b694 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java @@ -23,10 +23,23 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import 
org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.IdsQueryBuilder; +import org.elasticsearch.index.query.IdsQueryParser; import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryParser; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryParser; import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.index.query.TermQueryParser; +import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.highlight.HighlightBuilder.Field; import org.elasticsearch.test.ESTestCase; import org.junit.AfterClass; @@ -35,8 +48,10 @@ import org.junit.BeforeClass; import java.io.IOException; import java.util.Arrays; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; @@ -45,23 +60,26 @@ public class HighlightBuilderTests extends ESTestCase { private static final int NUMBER_OF_TESTBUILDERS = 20; private static NamedWriteableRegistry namedWriteableRegistry; + private static IndicesQueriesRegistry indicesQueriesRegistry; /** * setup for the whole base test class */ @BeforeClass public static void init() { - if (namedWriteableRegistry == null) { - namedWriteableRegistry = new NamedWriteableRegistry(); - namedWriteableRegistry.registerPrototype(QueryBuilder.class, new MatchAllQueryBuilder()); - namedWriteableRegistry.registerPrototype(QueryBuilder.class, new IdsQueryBuilder()); - namedWriteableRegistry.registerPrototype(QueryBuilder.class, new TermQueryBuilder("field", "value")); - } + namedWriteableRegistry = new 
NamedWriteableRegistry(); + @SuppressWarnings("rawtypes") + Set injectedQueryParsers = new HashSet<>(); + injectedQueryParsers.add(new MatchAllQueryParser()); + injectedQueryParsers.add(new IdsQueryParser()); + injectedQueryParsers.add(new TermQueryParser()); + indicesQueriesRegistry = new IndicesQueriesRegistry(Settings.settingsBuilder().build(), injectedQueryParsers, namedWriteableRegistry); } @AfterClass public static void afterClass() throws Exception { namedWriteableRegistry = null; + indicesQueriesRegistry = null; } /** @@ -107,6 +125,83 @@ public class HighlightBuilderTests extends ESTestCase { } } + /** + * Generic test that creates new highlighter from the test highlighter and checks both for equality + */ + public void testFromXContent() throws IOException { + QueryParseContext context = new QueryParseContext(indicesQueriesRegistry); + for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { + HighlightBuilder highlightBuilder = randomHighlighterBuilder(); + XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + if (randomBoolean()) { + builder.prettyPrint(); + } + builder.startObject(); + highlightBuilder.innerXContent(builder); + builder.endObject(); + + XContentParser parser = XContentHelper.createParser(builder.bytes()); + context.reset(parser); + HighlightBuilder secondHighlightBuilder = HighlightBuilder.fromXContent(context); + assertNotSame(highlightBuilder, secondHighlightBuilder); + assertEquals(highlightBuilder, secondHighlightBuilder); + assertEquals(highlightBuilder.hashCode(), secondHighlightBuilder.hashCode()); + } + } + + /** + * `tags_schema` is not produced by toXContent in the builder but should be parseable, so this + * adds a simple json test for this. 
+ */ + public void testParsingTagsSchema() throws IOException { + QueryParseContext context = new QueryParseContext(indicesQueriesRegistry); + String highlightElement = "{\n" + + " \"tags_schema\" : \"styled\"\n" + + "}\n"; + XContentParser parser = XContentFactory.xContent(highlightElement).createParser(highlightElement); + + context.reset(parser); + HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(context); + assertArrayEquals("setting tags_schema 'styled' should alter pre_tags", HighlighterParseElement.STYLED_PRE_TAG, + highlightBuilder.preTags()); + assertArrayEquals("setting tags_schema 'styled' should alter post_tags", HighlighterParseElement.STYLED_POST_TAGS, + highlightBuilder.postTags()); + + highlightElement = "{\n" + + " \"tags_schema\" : \"default\"\n" + + "}\n"; + parser = XContentFactory.xContent(highlightElement).createParser(highlightElement); + + context.reset(parser); + highlightBuilder = HighlightBuilder.fromXContent(context); + assertArrayEquals("setting tags_schema 'default' should alter pre_tags", HighlighterParseElement.DEFAULT_PRE_TAGS, + highlightBuilder.preTags()); + assertArrayEquals("setting tags_schema 'default' should alter post_tags", HighlighterParseElement.DEFAULT_POST_TAGS, + highlightBuilder.postTags()); + + highlightElement = "{\n" + + " \"tags_schema\" : \"somthing_else\"\n" + + "}\n"; + parser = XContentFactory.xContent(highlightElement).createParser(highlightElement); + + context.reset(parser); + try { + highlightBuilder = HighlightBuilder.fromXContent(context); + fail("setting unknown tag schema should throw exception"); + } catch (IllegalArgumentException e) { + assertEquals("Unknown tag schema [somthing_else]", e.getMessage()); + } + } + + protected static XContentBuilder toXContent(HighlightBuilder highlight, XContentType contentType) throws IOException { + XContentBuilder builder = XContentFactory.contentBuilder(contentType); + if (randomBoolean()) { + builder.prettyPrint(); + } + 
highlight.toXContent(builder, ToXContent.EMPTY_PARAMS); + return builder; + } + /** * create random shape that is put under test */ @@ -132,11 +227,11 @@ public class HighlightBuilderTests extends ESTestCase { return testHighlighter; } + @SuppressWarnings("rawtypes") private static void setRandomCommonOptions(AbstractHighlighterBuilder highlightBuilder) { if (randomBoolean()) { + // need to set this together, otherwise parsing will complain highlightBuilder.preTags(randomStringArray(0, 3)); - } - if (randomBoolean()) { highlightBuilder.postTags(randomStringArray(0, 3)); } if (randomBoolean()) { @@ -213,7 +308,7 @@ public class HighlightBuilderTests extends ESTestCase { } } - @SuppressWarnings("unchecked") + @SuppressWarnings({ "unchecked", "rawtypes" }) private static void mutateCommonOptions(AbstractHighlighterBuilder highlightBuilder) { switch (randomIntBetween(1, 16)) { case 1: @@ -242,6 +337,7 @@ public class HighlightBuilderTests extends ESTestCase { break; case 9: highlightBuilder.highlightFilter(toggleOrSet(highlightBuilder.highlightFilter())); + break; case 10: highlightBuilder.forceSource(toggleOrSet(highlightBuilder.forceSource())); break; @@ -316,6 +412,7 @@ public class HighlightBuilderTests extends ESTestCase { fieldToChange.matchedFields(randomStringArray(5, 10)); } } + break; } } return mutation; From 8558a408943e77773f06619b0e98f1a59731ba6e Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Thu, 3 Dec 2015 15:31:52 +0100 Subject: [PATCH 07/57] Fix copy_to when the target is a dynamic object field. 
Fixes #11237 --- .../index/mapper/DocumentParser.java | 82 ++++++---- .../copyto/CopyToMapperIntegrationIT.java | 20 +++ .../mapper/copyto/CopyToMapperTests.java | 150 +++++++++++++++++- 3 files changed, 215 insertions(+), 37 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index de4dc387c88..805607096cf 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -28,8 +28,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ReleasableLock; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.core.DateFieldMapper.DateFieldType; @@ -47,7 +45,6 @@ import java.io.IOException; import java.util.Collections; import java.util.HashSet; import java.util.List; -import java.util.Map; import java.util.Set; /** A parser for documents, given mappings from a DocumentMapper */ @@ -712,37 +709,64 @@ class DocumentParser implements Closeable { // The path of the dest field might be completely different from the current one so we need to reset it context = context.overridePath(new ContentPath(0)); + String[] paths = Strings.splitStringToArray(field, '.'); + String fieldName = paths[paths.length-1]; ObjectMapper mapper = context.root(); - String objectPath = ""; - String fieldPath = field; - int posDot = field.lastIndexOf('.'); - if (posDot > 0) { - objectPath = field.substring(0, posDot); - context.path().add(objectPath); - mapper = context.docMapper().objectMappers().get(objectPath); - fieldPath = 
field.substring(posDot + 1); + ObjectMapper[] mappers = new ObjectMapper[paths.length-1]; + if (paths.length > 1) { + ObjectMapper parent = context.root(); + for (int i = 0; i < paths.length-1; i++) { + mapper = context.docMapper().objectMappers().get(context.path().fullPathAsText(paths[i])); + if (mapper == null) { + // One mapping is missing, check if we are allowed to create a dynamic one. + ObjectMapper.Dynamic dynamic = parent.dynamic(); + if (dynamic == null) { + dynamic = dynamicOrDefault(context.root().dynamic()); + } + + switch (dynamic) { + case STRICT: + throw new StrictDynamicMappingException(parent.fullPath(), paths[i]); + case TRUE: + Mapper.Builder builder = context.root().findTemplateBuilder(context, paths[i], "object"); + if (builder == null) { + // if this is a non root object, then explicitly set the dynamic behavior if set + if (!(parent instanceof RootObjectMapper) && parent.dynamic() != ObjectMapper.Defaults.DYNAMIC) { + ((ObjectMapper.Builder) builder).dynamic(parent.dynamic()); + } + builder = MapperBuilders.object(paths[i]).enabled(true).pathType(parent.pathType()); + } + Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path()); + mapper = (ObjectMapper) builder.build(builderContext); + if (mapper.nested() != ObjectMapper.Nested.NO) { + throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().fullPathAsText(paths[i]) + "]) through `copy_to`"); + } + break; + case FALSE: + // Maybe we should log something to tell the user that the copy_to is ignored in this case. + break; + default: + throw new AssertionError("Unexpected dynamic type " + dynamic); + + } + } + context.path().add(paths[i]); + mappers[i] = mapper; + parent = mapper; + } } - if (mapper == null) { - //TODO: Create an object dynamically? 
- throw new MapperParsingException("attempt to copy value to non-existing object [" + field + "]"); - } - ObjectMapper update = parseDynamicValue(context, mapper, fieldPath, context.parser().currentToken()); + ObjectMapper update = parseDynamicValue(context, mapper, fieldName, context.parser().currentToken()); assert update != null; // we are parsing a dynamic value so we necessarily created a new mapping - // propagate the update to the root - while (objectPath.length() > 0) { - String parentPath = ""; - ObjectMapper parent = context.root(); - posDot = objectPath.lastIndexOf('.'); - if (posDot > 0) { - parentPath = objectPath.substring(0, posDot); - parent = context.docMapper().objectMappers().get(parentPath); + if (paths.length > 1) { + for (int i = paths.length - 2; i >= 0; i--) { + ObjectMapper parent = context.root(); + if (i > 0) { + parent = mappers[i-1]; + } + assert parent != null; + update = parent.mappingUpdate(update); } - if (parent == null) { - throw new IllegalStateException("[" + objectPath + "] has no parent for path [" + parentPath + "]"); - } - update = parent.mappingUpdate(update); - objectPath = parentPath; } context.addDynamicMappingsUpdate(update); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperIntegrationIT.java index 1d6e72834c3..4a010747624 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperIntegrationIT.java @@ -30,6 +30,7 @@ import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; @@ -68,6 +69,25 @@ public class CopyToMapperIntegrationIT extends ESIntegTestCase { } + 
public void testDynamicObjectCopyTo() throws Exception { + String mapping = jsonBuilder().startObject().startObject("doc").startObject("properties") + .startObject("foo") + .field("type", "string") + .field("copy_to", "root.top.child") + .endObject() + .endObject().endObject().endObject().string(); + assertAcked( + client().admin().indices().prepareCreate("test-idx") + .addMapping("doc", mapping) + ); + client().prepareIndex("test-idx", "doc", "1") + .setSource("foo", "bar") + .get(); + client().admin().indices().prepareRefresh("test-idx").execute().actionGet(); + SearchResponse response = client().prepareSearch("test-idx") + .setQuery(QueryBuilders.termQuery("root.top.child", "bar")).get(); + assertThat(response.getHits().totalHits(), equalTo(1L)); + } private XContentBuilder createDynamicTemplateMapping() throws IOException { return XContentFactory.jsonBuilder().startObject().startObject("doc") diff --git a/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java index 301d6b13e3b..d94ae2b6735 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java @@ -167,27 +167,126 @@ public class CopyToMapperTests extends ESSingleNodeTestCase { } - public void testCopyToFieldsNonExistingInnerObjectParsing() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties") - + public void testCopyToDynamicInnerObjectParsing() throws Exception { + String mapping = jsonBuilder().startObject().startObject("type1") + .startObject("properties") .startObject("copy_test") - .field("type", "string") - .field("copy_to", "very.inner.field") + .field("type", "string") + .field("copy_to", "very.inner.field") .endObject() - - .endObject().endObject().endObject().string(); + .endObject() + 
.endObject().endObject().string(); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); BytesReference json = jsonBuilder().startObject() .field("copy_test", "foo") + .field("new_field", "bar") .endObject().bytes(); + ParseContext.Document doc = docMapper.parse("test", "type1", "1", json).rootDoc(); + assertThat(doc.getFields("copy_test").length, equalTo(1)); + assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo")); + + assertThat(doc.getFields("very.inner.field").length, equalTo(1)); + assertThat(doc.getFields("very.inner.field")[0].stringValue(), equalTo("foo")); + + assertThat(doc.getFields("new_field").length, equalTo(1)); + assertThat(doc.getFields("new_field")[0].stringValue(), equalTo("bar")); + } + + public void testCopyToDynamicInnerInnerObjectParsing() throws Exception { + String mapping = jsonBuilder().startObject().startObject("type1") + .startObject("properties") + .startObject("copy_test") + .field("type", "string") + .field("copy_to", "very.far.inner.field") + .endObject() + .startObject("very") + .field("type", "object") + .startObject("properties") + .startObject("far") + .field("type", "object") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject().endObject().string(); + + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + + BytesReference json = jsonBuilder().startObject() + .field("copy_test", "foo") + .field("new_field", "bar") + .endObject().bytes(); + + ParseContext.Document doc = docMapper.parse("test", "type1", "1", json).rootDoc(); + assertThat(doc.getFields("copy_test").length, equalTo(1)); + assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo")); + + assertThat(doc.getFields("very.far.inner.field").length, equalTo(1)); + assertThat(doc.getFields("very.far.inner.field")[0].stringValue(), equalTo("foo")); + + assertThat(doc.getFields("new_field").length, equalTo(1)); + 
assertThat(doc.getFields("new_field")[0].stringValue(), equalTo("bar")); + } + + public void testCopyToStrictDynamicInnerObjectParsing() throws Exception { + String mapping = jsonBuilder().startObject().startObject("type1") + .field("dynamic", "strict") + .startObject("properties") + .startObject("copy_test") + .field("type", "string") + .field("copy_to", "very.inner.field") + .endObject() + .endObject() + .endObject().endObject().string(); + + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + + BytesReference json = jsonBuilder().startObject() + .field("copy_test", "foo") + .endObject().bytes(); + try { docMapper.parse("test", "type1", "1", json).rootDoc(); fail(); } catch (MapperParsingException ex) { - assertThat(ex.getMessage(), startsWith("attempt to copy value to non-existing object")); + assertThat(ex.getMessage(), startsWith("mapping set to strict, dynamic introduction of [very] within [type1] is not allowed")); + } + } + + public void testCopyToInnerStrictDynamicInnerObjectParsing() throws Exception { + String mapping = jsonBuilder().startObject().startObject("type1") + .startObject("properties") + .startObject("copy_test") + .field("type", "string") + .field("copy_to", "very.far.field") + .endObject() + .startObject("very") + .field("type", "object") + .startObject("properties") + .startObject("far") + .field("type", "object") + .field("dynamic", "strict") + .endObject() + .endObject() + .endObject() + + .endObject() + .endObject().endObject().string(); + + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + + BytesReference json = jsonBuilder().startObject() + .field("copy_test", "foo") + .endObject().bytes(); + + try { + docMapper.parse("test", "type1", "1", json).rootDoc(); + fail(); + } catch (MapperParsingException ex) { + assertThat(ex.getMessage(), startsWith("mapping set to strict, dynamic introduction of [field] within [very.far] is not 
allowed")); } } @@ -337,6 +436,41 @@ public class CopyToMapperTests extends ESSingleNodeTestCase { } } + public void testCopyToDynamicNestedObjectParsing() throws Exception { + String mapping = jsonBuilder().startObject().startObject("type1") + .startArray("dynamic_templates") + .startObject() + .startObject("objects") + .field("match_mapping_type", "object") + .startObject("mapping") + .field("type", "nested") + .endObject() + .endObject() + .endObject() + .endArray() + .startObject("properties") + .startObject("copy_test") + .field("type", "string") + .field("copy_to", "very.inner.field") + .endObject() + .endObject() + .endObject().endObject().string(); + + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + + BytesReference json = jsonBuilder().startObject() + .field("copy_test", "foo") + .field("new_field", "bar") + .endObject().bytes(); + + try { + docMapper.parse("test", "type1", "1", json).rootDoc(); + fail(); + } catch (MapperParsingException ex) { + assertThat(ex.getMessage(), startsWith("It is forbidden to create dynamic nested objects ([very]) through `copy_to`")); + } + } + private void assertFieldValue(Document doc, String field, Number... expected) { IndexableField[] values = doc.getFields(field); if (values == null) { From 914486795f0c97cc0167141d15a4f10cd3696089 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 7 Dec 2015 08:18:57 -0500 Subject: [PATCH 08/57] Addtional simplifications in IndexingMemoryController This commit removes some unneeded null checks from IndexingMemoryController that were left over from the work in #15251, and simplifies the try-catch block in IndexingMemoryController#updateShardBuffers. 
--- .../memory/IndexingMemoryController.java | 21 +++++++------------ 1 file changed, 8 insertions(+), 13 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java b/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java index d9c08431ef8..9a78d8b2c1f 100644 --- a/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java +++ b/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java @@ -19,7 +19,6 @@ package org.elasticsearch.indices.memory; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -213,23 +212,19 @@ public class IndexingMemoryController extends AbstractLifecycleComponent Date: Mon, 7 Dec 2015 14:56:05 +0100 Subject: [PATCH 09/57] Switching HighlighterBuilder to use ParseFields --- .../highlight/AbstractHighlighterBuilder.java | 55 +++++++---- .../search/highlight/HighlightBuilder.java | 95 ++++++++++--------- .../highlight/HighlighterParseElement.java | 6 +- .../highlight/HighlightBuilderTests.java | 3 + 4 files changed, 94 insertions(+), 65 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java b/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java index 79927ee7447..2081f659fd9 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.highlight; import org.apache.lucene.search.highlight.SimpleFragmenter; import org.apache.lucene.search.highlight.SimpleSpanFragmenter; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -37,6 +38,28 @@ import java.util.Objects; */ public abstract class AbstractHighlighterBuilder { + public static final ParseField PRE_TAGS_FIELD = new ParseField("pre_tags"); + public static final ParseField POST_TAGS_FIELD = new ParseField("post_tags"); + public static final ParseField FIELDS_FIELD = new ParseField("fields"); + public static final ParseField ORDER_FIELD = new ParseField("order"); + public static final ParseField TAGS_SCHEMA_FIELD = new ParseField("tags_schema"); + public static final ParseField HIGHLIGHT_FILTER_FIELD = new ParseField("highlight_filter"); + public static final ParseField FRAGMENT_SIZE_FIELD = new ParseField("fragment_size"); + public static final ParseField FRAGMENT_OFFSET_FIELD = new ParseField("fragment_offset"); + public static final ParseField NUMBER_OF_FRAGMENTS_FIELD = new ParseField("number_of_fragments"); + public static final ParseField ENCODER_FIELD = new ParseField("encoder"); + public static final ParseField REQUIRE_FIELD_MATCH_FIELD = new ParseField("require_field_match"); + public static final ParseField BOUNDARY_MAX_SCAN_FIELD = new ParseField("boundary_max_scan"); + public static final ParseField BOUNDARY_CHARS_FIELD = new ParseField("boundary_chars"); + public static final ParseField TYPE_FIELD = new ParseField("type"); + public static final ParseField FRAGMENTER_FIELD = new ParseField("fragmenter"); + public static final ParseField NO_MATCH_SIZE_FIELD = new ParseField("no_match_size"); + public static final ParseField FORCE_SOURCE_FIELD = new ParseField("force_source"); + public static final ParseField PHRASE_LIMIT_FIELD = new ParseField("phrase_limit"); + public static final ParseField OPTIONS_FIELD = new ParseField("options"); + public static final ParseField HIGHLIGHT_QUERY_FIELD = new ParseField("highlight_query"); + public static final ParseField MATCHED_FIELDS_FIELD = new ParseField("matched_fields"); 
+ protected String[] preTags; protected String[] postTags; @@ -347,52 +370,52 @@ public abstract class AbstractHighlighterBuilder 0) { - builder.field("options", options); + builder.field(OPTIONS_FIELD.getPreferredName(), options); } if (forceSource != null) { - builder.field("force_source", forceSource); + builder.field(FORCE_SOURCE_FIELD.getPreferredName(), forceSource); } if (requireFieldMatch != null) { - builder.field("require_field_match", requireFieldMatch); + builder.field(REQUIRE_FIELD_MATCH_FIELD.getPreferredName(), requireFieldMatch); } if (noMatchSize != null) { - builder.field("no_match_size", noMatchSize); + builder.field(NO_MATCH_SIZE_FIELD.getPreferredName(), noMatchSize); } if (phraseLimit != null) { - builder.field("phrase_limit", phraseLimit); + builder.field(PHRASE_LIMIT_FIELD.getPreferredName(), phraseLimit); } } diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java b/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java index 8a55a079735..d21888fccfd 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java @@ -45,6 +45,8 @@ public class HighlightBuilder extends AbstractHighlighterBuilder fields = new ArrayList<>(); private String encoder; @@ -166,7 +168,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder preTagsList = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { preTagsList.add(parser.text()); } highlightBuilder.preTags(preTagsList.toArray(new String[preTagsList.size()])); - } else if ("post_tags".equals(topLevelFieldName) || "postTags".equals(topLevelFieldName)) { + } else if (parseContext.parseFieldMatcher().match(topLevelFieldName, POST_TAGS_FIELD)) { List postTagsList = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { postTagsList.add(parser.text()); } 
highlightBuilder.postTags(postTagsList.toArray(new String[postTagsList.size()])); - } else if ("fields".equals(topLevelFieldName)) { + } else if (parseContext.parseFieldMatcher().match(topLevelFieldName, FIELDS_FIELD)) { highlightBuilder.useExplicitFieldOrder(true); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.START_OBJECT) { @@ -225,39 +227,40 @@ public class HighlightBuilder extends AbstractHighlighterBuilder 0) { if (useExplicitFieldOrder) { - builder.startArray("fields"); + builder.startArray(FIELDS_FIELD.getPreferredName()); } else { - builder.startObject("fields"); + builder.startObject(FIELDS_FIELD.getPreferredName()); } for (Field field : fields) { if (useExplicitFieldOrder) { @@ -396,10 +399,10 @@ public class HighlightBuilder extends AbstractHighlighterBuilder preTagsList = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { preTagsList.add(parser.text()); } field.preTags(preTagsList.toArray(new String[preTagsList.size()])); - } else if ("post_tags".equals(currentFieldName) || "postTags".equals(currentFieldName)) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, POST_TAGS_FIELD)) { List postTagsList = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { postTagsList.add(parser.text()); } field.postTags(postTagsList.toArray(new String[postTagsList.size()])); - } else if ("matched_fields".equals(currentFieldName) || "matchedFields".equals(currentFieldName)) { + } else if (parseContext.parseFieldMatcher().match(currentFieldName, MATCHED_FIELDS_FIELD)) { List matchedFields = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { matchedFields.add(parser.text()); @@ -434,37 +437,37 @@ public class HighlightBuilder extends AbstractHighlighterBuilderfvh should provide highlighting on filter clauses */ public static final boolean DEFAULT_HIGHLIGHT_FILTER = false; - 
/** default for highlight fragments being ordered by score */ + /** default for highlight fragments being ordered by score */ public static final boolean DEFAULT_SCORE_ORDERED = false; /** the default encoder setting */ public static final String DEFAULT_ENCODER = "default"; @@ -74,7 +74,7 @@ public class HighlighterParseElement implements SearchParseElement { public static final String[] DEFAULT_PRE_TAGS = new String[]{""}; /** the default closing tag */ public static final String[] DEFAULT_POST_TAGS = new String[]{""}; - + /** the default opening tags when tag_schema = "styled" */ public static final String[] STYLED_PRE_TAG = { "", "", "", @@ -94,7 +94,7 @@ public class HighlighterParseElement implements SearchParseElement { } } - public static SearchContextHighlight parse(XContentParser parser, QueryShardContext queryShardContext) throws IOException { + public SearchContextHighlight parse(XContentParser parser, QueryShardContext queryShardContext) throws IOException { XContentParser.Token token; String topLevelFieldName = null; final List> fieldsOptions = new ArrayList<>(); diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java index 70e6574b694..6ece2add646 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.highlight; +import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -130,6 +131,7 @@ public class HighlightBuilderTests extends ESTestCase { */ public void testFromXContent() throws IOException { QueryParseContext context = new 
QueryParseContext(indicesQueriesRegistry); + context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY)); for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { HighlightBuilder highlightBuilder = randomHighlighterBuilder(); XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); @@ -155,6 +157,7 @@ public class HighlightBuilderTests extends ESTestCase { */ public void testParsingTagsSchema() throws IOException { QueryParseContext context = new QueryParseContext(indicesQueriesRegistry); + context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY)); String highlightElement = "{\n" + " \"tags_schema\" : \"styled\"\n" + "}\n"; From c57672c9b3012a420bd2f4895cb9b3bbbf49bb78 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Mon, 7 Dec 2015 16:21:36 +0100 Subject: [PATCH 10/57] Adding checks and tests for exceptions on unknown fieldnames --- .../highlight/AbstractHighlighterBuilder.java | 8 +- .../search/highlight/HighlightBuilder.java | 26 ++-- .../highlight/HighlightBuilderTests.java | 114 +++++++++++++++++- 3 files changed, 136 insertions(+), 12 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java b/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java index 2081f659fd9..e1818053596 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java @@ -36,7 +36,7 @@ import java.util.Objects; * This abstract class holds parameters shared by {@link HighlightBuilder} and {@link HighlightBuilder.Field} * and provides the common setters, equality, hashCode calculation and common serialization */ -public abstract class AbstractHighlighterBuilder { +public abstract class AbstractHighlighterBuilder> { public static final ParseField PRE_TAGS_FIELD = new ParseField("pre_tags"); public static 
final ParseField POST_TAGS_FIELD = new ParseField("post_tags"); @@ -72,7 +72,7 @@ public abstract class AbstractHighlighterBuilder highlightQuery; protected String order; @@ -198,7 +198,7 @@ public abstract class AbstractHighlighterBuilder highlightQuery) { this.highlightQuery = highlightQuery; return (HB) this; } @@ -206,7 +206,7 @@ public abstract class AbstractHighlighterBuilder highlightQuery() { return this.highlightQuery; } diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java b/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java index d21888fccfd..c9479aa72fb 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.highlight; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -214,7 +215,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder Date: Mon, 7 Dec 2015 19:15:15 +0100 Subject: [PATCH 11/57] Mapper parsers should not check for a `tokenized` property. I don't recall of this property of any of our field mappers and it's not in our docs so I suspect it's very old. The removal of this property will not fail version upgrades since none of the field mappers use it in toXContent. 
--- .../java/org/elasticsearch/index/mapper/core/TypeParsers.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java index 0bb0b213f64..c0e0cef0f0c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java @@ -197,9 +197,6 @@ public class TypeParsers { } else if (propName.equals("index")) { parseIndex(name, propNode.toString(), builder); iterator.remove(); - } else if (propName.equals("tokenized")) { - builder.tokenized(nodeBooleanValue(propNode)); - iterator.remove(); } else if (propName.equals(DOC_VALUES)) { builder.docValues(nodeBooleanValue(propNode)); iterator.remove(); From 7dcb40bcacae2b44174f2f1cb645ee5f13e30f45 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Mon, 7 Dec 2015 23:06:11 +0100 Subject: [PATCH 12/57] Add support for proxy authentication for s3 and ec2 When using S3 or EC2, it was possible to use a proxy to access the EC2 or S3 API, but a username and password could not be set. This commit adds support for this. Also, to make all that consistent, proxy settings for both plugins have been renamed: * from `cloud.aws.proxy_host` to `cloud.aws.proxy.host` * from `cloud.aws.ec2.proxy_host` to `cloud.aws.ec2.proxy.host` * from `cloud.aws.s3.proxy_host` to `cloud.aws.s3.proxy.host` * from `cloud.aws.proxy_port` to `cloud.aws.proxy.port` * from `cloud.aws.ec2.proxy_port` to `cloud.aws.ec2.proxy.port` * from `cloud.aws.s3.proxy_port` to `cloud.aws.s3.proxy.port` New settings are `proxy.username` and `proxy.password`. ```yml cloud: aws: protocol: https proxy: host: proxy1.company.com port: 8083 username: myself password: theBestPasswordEver! 
``` You can also set different proxies for `ec2` and `s3`: ```yml cloud: aws: s3: proxy: host: proxy1.company.com port: 8083 username: myself1 password: theBestPasswordEver1! ec2: proxy: host: proxy2.company.com port: 8083 username: myself2 password: theBestPasswordEver2! ``` Note that `password` is filtered with `SettingsFilter`. We also fix a potential issue in S3 repository. We were supposed to accept key/secret either set under `cloud.aws` or `cloud.aws.s3` but the actual code never implemented that. It was: ```java account = settings.get("cloud.aws.access_key"); key = settings.get("cloud.aws.secret_key"); ``` We replaced that by: ```java String account = settings.get(CLOUD_S3.KEY, settings.get(CLOUD_AWS.KEY)); String key = settings.get(CLOUD_S3.SECRET, settings.get(CLOUD_AWS.SECRET)); ``` Also, we extract all settings for S3 in `AwsS3Service` as it's already the case for `AwsEc2Service` class. Closes #15268. --- docs/plugins/discovery-ec2.asciidoc | 25 +++++++--- docs/plugins/repository-s3.asciidoc | 25 +++++++--- docs/reference/migration/migrate_3_0.asciidoc | 9 ++++ plugins/discovery-ec2/build.gradle | 2 +- .../cloud/aws/AwsEc2Service.java | 20 ++++++-- .../cloud/aws/AwsEc2ServiceImpl.java | 17 +++++-- .../elasticsearch/cloud/aws/AwsS3Service.java | 50 +++++++++++++++++++ .../cloud/aws/InternalAwsS3Service.java | 48 +++++++++++------- .../repositories/s3/S3Repository.java | 28 ++++++----- 9 files changed, 168 insertions(+), 56 deletions(-) diff --git a/docs/plugins/discovery-ec2.asciidoc b/docs/plugins/discovery-ec2.asciidoc index a2b80495003..bdd46fb72fd 100644 --- a/docs/plugins/discovery-ec2.asciidoc +++ b/docs/plugins/discovery-ec2.asciidoc @@ -64,16 +64,19 @@ cloud: protocol: https ---- -In addition, a proxy can be configured with the `proxy_host` and `proxy_port` settings (note that protocol can be -`http` or `https`): +In addition, a proxy can be configured with the `proxy.host`, `proxy.port`, `proxy.username` and `proxy.password` settings +(note that 
protocol can be `http` or `https`): [source,yaml] ---- cloud: aws: protocol: https - proxy_host: proxy1.company.com - proxy_port: 8083 + proxy: + host: proxy1.company.com + port: 8083 + username: myself + password: theBestPasswordEver! ---- You can also set different proxies for `ec2` and `s3`: @@ -83,11 +86,17 @@ You can also set different proxies for `ec2` and `s3`: cloud: aws: s3: - proxy_host: proxy1.company.com - proxy_port: 8083 + proxy: + host: proxy1.company.com + port: 8083 + username: myself1 + password: theBestPasswordEver1! ec2: - proxy_host: proxy2.company.com - proxy_port: 8083 + proxy: + host: proxy2.company.com + port: 8083 + username: myself2 + password: theBestPasswordEver2! ---- [[discovery-ec2-usage-region]] diff --git a/docs/plugins/repository-s3.asciidoc b/docs/plugins/repository-s3.asciidoc index 16505885446..faaa87302ee 100644 --- a/docs/plugins/repository-s3.asciidoc +++ b/docs/plugins/repository-s3.asciidoc @@ -67,16 +67,19 @@ cloud: protocol: https ---- -In addition, a proxy can be configured with the `proxy_host` and `proxy_port` settings (note that protocol can be -`http` or `https`): +In addition, a proxy can be configured with the `proxy.host`, `proxy.port`, `proxy.username` and `proxy.password` settings +(note that protocol can be `http` or `https`): [source,yaml] ---- cloud: aws: protocol: https - proxy_host: proxy1.company.com - proxy_port: 8083 + proxy: + host: proxy1.company.com + port: 8083 + username: myself + password: theBestPasswordEver! ---- You can also set different proxies for `ec2` and `s3`: @@ -86,11 +89,17 @@ You can also set different proxies for `ec2` and `s3`: cloud: aws: s3: - proxy_host: proxy1.company.com - proxy_port: 8083 + proxy: + host: proxy1.company.com + port: 8083 + username: myself1 + password: theBestPasswordEver1! ec2: - proxy_host: proxy2.company.com - proxy_port: 8083 + proxy: + host: proxy2.company.com + port: 8083 + username: myself2 + password: theBestPasswordEver2! 
---- [[repository-s3-usage-region]] diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index b8683bc6fd0..2b58303919c 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -237,6 +237,15 @@ Cloud AWS plugin has been split in two plugins: * {plugins}/discovery-ec2.html[Discovery EC2 plugin] * {plugins}/repository-s3.html[Repository S3 plugin] +Proxy settings for both plugins have been renamed: + +* from `cloud.aws.proxy_host` to `cloud.aws.proxy.host` +* from `cloud.aws.ec2.proxy_host` to `cloud.aws.ec2.proxy.host` +* from `cloud.aws.s3.proxy_host` to `cloud.aws.s3.proxy.host` +* from `cloud.aws.proxy_port` to `cloud.aws.proxy.port` +* from `cloud.aws.ec2.proxy_port` to `cloud.aws.ec2.proxy.port` +* from `cloud.aws.s3.proxy_port` to `cloud.aws.s3.proxy.port` + ==== Cloud Azure plugin changes Cloud Azure plugin has been split in three plugins: diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index 25706619c15..77cfd6626d5 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -42,7 +42,7 @@ dependencyLicenses { mapping from: /jackson-.*/, to: 'jackson' } -compileJava.options.compilerArgs << '-Xlint:-rawtypes' +compileJava.options.compilerArgs << '-Xlint:-rawtypes,-deprecation' test { // this is needed for insecure plugins, remove if possible! 
diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java index a427b4af4ab..d71d9dfb0af 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java @@ -27,20 +27,32 @@ public interface AwsEc2Service extends LifecycleComponent { public static final String KEY = "cloud.aws.access_key"; public static final String SECRET = "cloud.aws.secret_key"; public static final String PROTOCOL = "cloud.aws.protocol"; - public static final String PROXY_HOST = "cloud.aws.proxy_host"; - public static final String PROXY_PORT = "cloud.aws.proxy_port"; + public static final String PROXY_HOST = "cloud.aws.proxy.host"; + public static final String PROXY_PORT = "cloud.aws.proxy.port"; + public static final String PROXY_USERNAME = "cloud.aws.proxy.username"; + public static final String PROXY_PASSWORD = "cloud.aws.proxy.password"; public static final String SIGNER = "cloud.aws.signer"; public static final String REGION = "cloud.aws.region"; + @Deprecated + public static final String DEPRECATED_PROXY_HOST = "cloud.aws.proxy_host"; + @Deprecated + public static final String DEPRECATED_PROXY_PORT = "cloud.aws.proxy_port"; } final class CLOUD_EC2 { public static final String KEY = "cloud.aws.ec2.access_key"; public static final String SECRET = "cloud.aws.ec2.secret_key"; public static final String PROTOCOL = "cloud.aws.ec2.protocol"; - public static final String PROXY_HOST = "cloud.aws.ec2.proxy_host"; - public static final String PROXY_PORT = "cloud.aws.ec2.proxy_port"; + public static final String PROXY_HOST = "cloud.aws.ec2.proxy.host"; + public static final String PROXY_PORT = "cloud.aws.ec2.proxy.port"; + public static final String PROXY_USERNAME = "cloud.aws.ec2.proxy.username"; + public static final String PROXY_PASSWORD = 
"cloud.aws.ec2.proxy.password"; public static final String SIGNER = "cloud.aws.ec2.signer"; public static final String ENDPOINT = "cloud.aws.ec2.endpoint"; + @Deprecated + public static final String DEPRECATED_PROXY_HOST = "cloud.aws.ec2.proxy_host"; + @Deprecated + public static final String DEPRECATED_PROXY_PORT = "cloud.aws.ec2.proxy_port"; } final class DISCOVERY_EC2 { diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java index 76c3262db3f..ec1ffd54a77 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java @@ -56,8 +56,10 @@ public class AwsEc2ServiceImpl extends AbstractLifecycleComponent // Filter global settings settingsFilter.addFilter(CLOUD_AWS.KEY); settingsFilter.addFilter(CLOUD_AWS.SECRET); + settingsFilter.addFilter(CLOUD_AWS.PROXY_PASSWORD); settingsFilter.addFilter(CLOUD_EC2.KEY); settingsFilter.addFilter(CLOUD_EC2.SECRET); + settingsFilter.addFilter(CLOUD_EC2.PROXY_PASSWORD); // add specific ec2 name resolver networkService.addCustomNameResolver(new Ec2NameResolver(settings)); discoveryNodeService.addCustomAttributeProvider(new Ec2CustomNodeAttributes(settings)); @@ -83,16 +85,25 @@ public class AwsEc2ServiceImpl extends AbstractLifecycleComponent String account = settings.get(CLOUD_EC2.KEY, settings.get(CLOUD_AWS.KEY)); String key = settings.get(CLOUD_EC2.SECRET, settings.get(CLOUD_AWS.SECRET)); - String proxyHost = settings.get(CLOUD_EC2.PROXY_HOST, settings.get(CLOUD_AWS.PROXY_HOST)); + String proxyHost = settings.get(CLOUD_AWS.PROXY_HOST, settings.get(CLOUD_AWS.DEPRECATED_PROXY_HOST)); + proxyHost = settings.get(CLOUD_EC2.PROXY_HOST, settings.get(CLOUD_EC2.DEPRECATED_PROXY_HOST, proxyHost)); if (proxyHost != null) { - String portString = settings.get(CLOUD_EC2.PROXY_PORT, 
settings.get(CLOUD_AWS.PROXY_PORT, "80")); + String portString = settings.get(CLOUD_AWS.PROXY_PORT, settings.get(CLOUD_AWS.DEPRECATED_PROXY_PORT, "80")); + portString = settings.get(CLOUD_EC2.PROXY_PORT, settings.get(CLOUD_EC2.DEPRECATED_PROXY_PORT, portString)); Integer proxyPort; try { proxyPort = Integer.parseInt(portString, 10); } catch (NumberFormatException ex) { throw new IllegalArgumentException("The configured proxy port value [" + portString + "] is invalid", ex); } - clientConfiguration.withProxyHost(proxyHost).setProxyPort(proxyPort); + String proxyUsername = settings.get(CLOUD_EC2.PROXY_USERNAME, settings.get(CLOUD_AWS.PROXY_USERNAME)); + String proxyPassword = settings.get(CLOUD_EC2.PROXY_PASSWORD, settings.get(CLOUD_AWS.PROXY_PASSWORD)); + + clientConfiguration + .withProxyHost(proxyHost) + .withProxyPort(proxyPort) + .withProxyUsername(proxyUsername) + .withProxyPassword(proxyPassword); } // #155: we might have 3rd party users using older EC2 API version diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java index e5db2ed7357..711b8db9374 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java @@ -26,6 +26,56 @@ import org.elasticsearch.common.component.LifecycleComponent; * */ public interface AwsS3Service extends LifecycleComponent { + + final class CLOUD_AWS { + public static final String KEY = "cloud.aws.access_key"; + public static final String SECRET = "cloud.aws.secret_key"; + public static final String PROTOCOL = "cloud.aws.protocol"; + public static final String PROXY_HOST = "cloud.aws.proxy.host"; + public static final String PROXY_PORT = "cloud.aws.proxy.port"; + public static final String PROXY_USERNAME = "cloud.aws.proxy.username"; + public static final String PROXY_PASSWORD = 
"cloud.aws.proxy.password"; + public static final String SIGNER = "cloud.aws.signer"; + public static final String REGION = "cloud.aws.region"; + @Deprecated + public static final String DEPRECATED_PROXY_HOST = "cloud.aws.proxy_host"; + @Deprecated + public static final String DEPRECATED_PROXY_PORT = "cloud.aws.proxy_port"; + } + + final class CLOUD_S3 { + public static final String KEY = "cloud.aws.s3.access_key"; + public static final String SECRET = "cloud.aws.s3.secret_key"; + public static final String PROTOCOL = "cloud.aws.s3.protocol"; + public static final String PROXY_HOST = "cloud.aws.s3.proxy.host"; + public static final String PROXY_PORT = "cloud.aws.s3.proxy.port"; + public static final String PROXY_USERNAME = "cloud.aws.s3.proxy.username"; + public static final String PROXY_PASSWORD = "cloud.aws.s3.proxy.password"; + public static final String SIGNER = "cloud.aws.s3.signer"; + public static final String ENDPOINT = "cloud.aws.s3.endpoint"; + @Deprecated + public static final String DEPRECATED_PROXY_HOST = "cloud.aws.s3.proxy_host"; + @Deprecated + public static final String DEPRECATED_PROXY_PORT = "cloud.aws.s3.proxy_port"; + } + + final class REPOSITORY_S3 { + public static final String BUCKET = "repositories.s3.bucket"; + public static final String ENDPOINT = "repositories.s3.endpoint"; + public static final String PROTOCOL = "repositories.s3.protocol"; + public static final String REGION = "repositories.s3.region"; + public static final String SERVER_SIDE_ENCRYPTION = "repositories.s3.server_side_encryption"; + public static final String BUFFER_SIZE = "repositories.s3.buffer_size"; + public static final String MAX_RETRIES = "repositories.s3.max_retries"; + public static final String CHUNK_SIZE = "repositories.s3.chunk_size"; + public static final String COMPRESS = "repositories.s3.compress"; + public static final String STORAGE_CLASS = "repositories.s3.storage_class"; + public static final String CANNED_ACL = "repositories.s3.canned_acl"; + public 
static final String BASE_PATH = "repositories.s3.base_path"; + } + + + AmazonS3 client(); AmazonS3 client(String endpoint, String protocol, String region, String account, String key); diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java index 4752a3f80b2..7d0b72cd63c 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java @@ -50,8 +50,12 @@ public class InternalAwsS3Service extends AbstractLifecycleComponent Date: Mon, 7 Dec 2015 22:07:20 -0500 Subject: [PATCH 13/57] Add missing license header to o.e.m.Probes --- .../org/elasticsearch/monitor/Probes.java | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/monitor/Probes.java b/core/src/main/java/org/elasticsearch/monitor/Probes.java index d20a3f58b27..e31903c0bce 100644 --- a/core/src/main/java/org/elasticsearch/monitor/Probes.java +++ b/core/src/main/java/org/elasticsearch/monitor/Probes.java @@ -1,3 +1,22 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + package org.elasticsearch.monitor; import java.lang.management.OperatingSystemMXBean; From 25d60e152fa4dfe57555388916e8125e8283653f Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 7 Dec 2015 15:58:42 -0800 Subject: [PATCH 14/57] Tribe: Fix tribe node to load config file for internal client nodes The tribe node creates one local client node for each cluster it connects to. Refactorings in #13383 broke this so that each local client node now tries to load the full elasticsearch.yml that the real tribe node uses. This change fixes the problem by adding a TribeClientNode which is a subclass of Node. The Environment the node uses is now passed in (in place of Settings), and the TribeClientNode simply does not use InternalSettingsPreparer.prepareEnvironment. The tests around tribe nodes are not great. The existing tests pass, but I also manually tested by creating 2 local clusters, and configuring and starting a tribe node. With this I was able to see in the logs the tribe node connecting to each cluster. 
closes #13383 --- .../java/org/elasticsearch/node/Node.java | 11 +++--- .../elasticsearch/tribe/TribeClientNode.java | 37 +++++++++++++++++++ .../org/elasticsearch/tribe/TribeService.java | 5 ++- .../elasticsearch/tribe/TribeUnitTests.java | 5 +-- .../java/org/elasticsearch/node/MockNode.java | 3 +- 5 files changed, 49 insertions(+), 12 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/tribe/TribeClientNode.java diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index 04fc7e95565..3caff627530 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -128,14 +128,13 @@ public class Node implements Releasable { * @param preparedSettings Base settings to configure the node with */ public Node(Settings preparedSettings) { - this(preparedSettings, Version.CURRENT, Collections.>emptyList()); + this(InternalSettingsPreparer.prepareEnvironment(preparedSettings, null), Version.CURRENT, Collections.>emptyList()); } - Node(Settings preparedSettings, Version version, Collection> classpathPlugins) { - final Settings pSettings = settingsBuilder().put(preparedSettings) - .put(Client.CLIENT_TYPE_SETTING, CLIENT_TYPE).build(); - Environment tmpEnv = InternalSettingsPreparer.prepareEnvironment(pSettings, null); - Settings tmpSettings = TribeService.processSettings(tmpEnv.settings()); + protected Node(Environment tmpEnv, Version version, Collection> classpathPlugins) { + Settings tmpSettings = settingsBuilder().put(tmpEnv.settings()) + .put(Client.CLIENT_TYPE_SETTING, CLIENT_TYPE).build(); + tmpSettings = TribeService.processSettings(tmpSettings); ESLogger logger = Loggers.getLogger(Node.class, tmpSettings.get("name")); logger.info("version[{}], pid[{}], build[{}/{}]", version, JvmInfo.jvmInfo().pid(), Build.CURRENT.shortHash(), Build.CURRENT.date()); diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeClientNode.java 
b/core/src/main/java/org/elasticsearch/tribe/TribeClientNode.java new file mode 100644 index 00000000000..688dfe5a92d --- /dev/null +++ b/core/src/main/java/org/elasticsearch/tribe/TribeClientNode.java @@ -0,0 +1,37 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.tribe; + +import org.elasticsearch.Version; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.node.Node; +import org.elasticsearch.plugins.Plugin; + +import java.util.Collections; + +/** + * An internal node that connects to a remote cluster, as part of a tribe node. 
+ */ +class TribeClientNode extends Node { + TribeClientNode(Settings settings) { + super(new Environment(settings), Version.CURRENT, Collections.>emptyList()); + } +} diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java index 87da13fad4a..343606e7805 100644 --- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -132,14 +132,15 @@ public class TribeService extends AbstractLifecycleComponent { nodesSettings.remove("on_conflict"); // remove prefix settings that don't indicate a client for (Map.Entry entry : nodesSettings.entrySet()) { Settings.Builder sb = Settings.builder().put(entry.getValue()); - sb.put("node.name", settings.get("name") + "/" + entry.getKey()); + sb.put("name", settings.get("name") + "/" + entry.getKey()); sb.put("path.home", settings.get("path.home")); // pass through ES home dir sb.put(TRIBE_NAME, entry.getKey()); sb.put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true); if (sb.get("http.enabled") == null) { sb.put("http.enabled", false); } - nodes.add(NodeBuilder.nodeBuilder().settings(sb).client(true).build()); + sb.put("node.client", true); + nodes.add(new TribeClientNode(sb.build())); } String[] blockIndicesWrite = Strings.EMPTY_ARRAY; diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java index c13b91b159a..f25bd87db19 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java @@ -54,13 +54,12 @@ public class TribeUnitTests extends ESTestCase { @BeforeClass public static void createTribes() { Settings baseSettings = Settings.builder() - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) .put("http.enabled", false) .put("node.mode", 
NODE_MODE) .put("path.home", createTempDir()).build(); - tribe1 = NodeBuilder.nodeBuilder().settings(Settings.builder().put(baseSettings).put("cluster.name", "tribe1").put("node.name", "tribe1_node")).node(); - tribe2 = NodeBuilder.nodeBuilder().settings(Settings.builder().put(baseSettings).put("cluster.name", "tribe2").put("node.name", "tribe2_node")).node(); + tribe1 = new TribeClientNode(Settings.builder().put(baseSettings).put("cluster.name", "tribe1").put("name", "tribe1_node").build()).start(); + tribe2 = new TribeClientNode(Settings.builder().put(baseSettings).put("cluster.name", "tribe2").put("name", "tribe2_node").build()).start(); } @AfterClass diff --git a/test-framework/src/main/java/org/elasticsearch/node/MockNode.java b/test-framework/src/main/java/org/elasticsearch/node/MockNode.java index c5592fef48d..57dcc08f4fe 100644 --- a/test-framework/src/main/java/org/elasticsearch/node/MockNode.java +++ b/test-framework/src/main/java/org/elasticsearch/node/MockNode.java @@ -21,6 +21,7 @@ package org.elasticsearch.node; import org.elasticsearch.Version; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.plugins.Plugin; import java.util.Collection; @@ -39,7 +40,7 @@ public class MockNode extends Node { private Collection> plugins; public MockNode(Settings settings, Version version, Collection> classpathPlugins) { - super(settings, version, classpathPlugins); + super(InternalSettingsPreparer.prepareEnvironment(settings, null), version, classpathPlugins); this.version = version; this.plugins = classpathPlugins; } From 82b502c21f9fe5912e706f9feff47ddfa5bd8be2 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Mon, 7 Dec 2015 16:36:31 +0100 Subject: [PATCH 15/57] Make IndexShard operation be more explicit about whether they are expected to run on a primary or replica This commit cherry picks some infrastructure changes from the `feature/seq_no` branch to make merging from 
master easier. More explicitly, IndexShard currently has prepareIndex and prepareDelete methods that are called both on the primary and the replica, giving it a different origin parameter. Instead, this commit creates two explicit prepare*OnPrimary and prepare*OnReplica methods. This has the extra added value of not expecting the caller to use an Engine enum. Also, the commit adds some code reuse between TransportIndexAction and TransportDeleteAction and their TransportShardBulkAction counterparts. Closes #15282 --- .../action/bulk/TransportShardBulkAction.java | 42 ++----- .../action/delete/TransportDeleteAction.java | 28 +++-- .../action/index/TransportIndexAction.java | 56 ++++++++- .../TransportReplicationAction.java | 44 +------ .../cluster/routing/ShardRouting.java | 53 ++++---- .../elasticsearch/index/shard/IndexShard.java | 113 +++++++++++------- .../cluster/routing/ShardRoutingTests.java | 6 + 7 files changed, 191 insertions(+), 151 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index a4565cf4cfc..e51a1b938d8 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -25,8 +25,10 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.replication.TransportReplicationAction; import org.elasticsearch.action.update.UpdateHelper; 
@@ -49,8 +51,6 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.VersionConflictEngineException; -import org.elasticsearch.index.mapper.Mapping; -import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog; @@ -164,7 +164,7 @@ public class TransportShardBulkAction extends TransportReplicationAction writeResult = shardDeleteOperation(request, deleteRequest, indexShard); + final WriteResult writeResult = TransportDeleteAction.executeDeleteRequestOnPrimary(deleteRequest, indexShard); DeleteResponse deleteResponse = writeResult.response(); location = locationToSync(location, writeResult.location); setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_DELETE, deleteResponse)); @@ -304,7 +304,7 @@ public class TransportShardBulkAction extends TransportReplicationAction shardIndexOperation(BulkShardRequest request, IndexRequest indexRequest, ClusterState clusterState, IndexShard indexShard, boolean processed) throws Throwable { // validate, if routing is required, that we got routing @@ -334,21 +334,7 @@ public class TransportShardBulkAction extends TransportReplicationAction shardDeleteOperation(BulkShardRequest request, DeleteRequest deleteRequest, IndexShard indexShard) { - Engine.Delete delete = indexShard.prepareDelete(deleteRequest.type(), deleteRequest.id(), deleteRequest.version(), deleteRequest.versionType(), Engine.Operation.Origin.PRIMARY); - indexShard.delete(delete); - // update the request with the version so it will go to the replicas - deleteRequest.versionType(delete.versionType().versionTypeForReplicationAndRecovery()); - deleteRequest.version(delete.version()); - - assert deleteRequest.versionType().validateVersionForWrites(deleteRequest.version()); - - DeleteResponse deleteResponse = new 
DeleteResponse(request.index(), deleteRequest.type(), deleteRequest.id(), delete.version(), delete.found()); - return new WriteResult(deleteResponse, delete.getTranslogLocation()); + return TransportIndexAction.executeIndexRequestOnPrimary(indexRequest, indexShard, mappingUpdatedAction); } static class UpdateResult { @@ -424,7 +410,7 @@ public class TransportShardBulkAction extends TransportReplicationAction result = TransportDeleteAction.executeDeleteRequestOnPrimary(deleteRequest, indexShard); return new UpdateResult(translate, deleteRequest, result); } catch (Throwable t) { t = ExceptionsHelper.unwrapCause(t); @@ -457,15 +443,7 @@ public class TransportShardBulkAction extends TransportReplicationAction shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) { DeleteRequest request = shardRequest.request; IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId.getIndex()).getShard(shardRequest.shardId.id()); - Engine.Delete delete = indexShard.prepareDelete(request.type(), request.id(), request.version(), request.versionType(), Engine.Operation.Origin.PRIMARY); + final WriteResult result = executeDeleteRequestOnPrimary(request, indexShard); + processAfterWrite(request.refresh(), indexShard, result.location); + return new Tuple<>(result.response, shardRequest.request); + } + + public static WriteResult executeDeleteRequestOnPrimary(DeleteRequest request, IndexShard indexShard) { + Engine.Delete delete = indexShard.prepareDeleteOnPrimary(request.type(), request.id(), request.version(), request.versionType()); indexShard.delete(delete); // update the request with the version so it will go to the replicas request.versionType(delete.versionType().versionTypeForReplicationAndRecovery()); request.version(delete.version()); assert request.versionType().validateVersionForWrites(request.version()); - processAfter(request.refresh(), indexShard, delete.getTranslogLocation()); - - DeleteResponse response = new 
DeleteResponse(shardRequest.shardId.getIndex(), request.type(), request.id(), delete.version(), delete.found()); - return new Tuple<>(response, shardRequest.request); + return new WriteResult<>( + new DeleteResponse(indexShard.shardId().getIndex(), request.type(), request.id(), delete.version(), delete.found()), + delete.getTranslogLocation()); } + public static Engine.Delete executeDeleteRequestOnReplica(DeleteRequest request, IndexShard indexShard) { + Engine.Delete delete = indexShard.prepareDeleteOnReplica(request.type(), request.id(), request.version(), request.versionType()); + indexShard.delete(delete); + return delete; + } + + @Override protected void shardOperationOnReplica(ShardId shardId, DeleteRequest request) { IndexShard indexShard = indicesService.indexServiceSafe(shardId.getIndex()).getShard(shardId.id()); - Engine.Delete delete = indexShard.prepareDelete(request.type(), request.id(), request.version(), request.versionType(), Engine.Operation.Origin.REPLICA); - - indexShard.delete(delete); - processAfter(request.refresh(), indexShard, delete.getTranslogLocation()); + Engine.Delete delete = executeDeleteRequestOnReplica(request, indexShard); + processAfterWrite(request.refresh(), indexShard, delete.getTranslogLocation()); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java b/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java index 3ffb5765e8c..06a417240d4 100644 --- a/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java @@ -166,11 +166,11 @@ public class TransportIndexAction extends TransportReplicationAction result = executeIndexRequestOnPrimary(request, indexShard); + final WriteResult result = executeIndexRequestOnPrimary(request, indexShard, mappingUpdatedAction); final IndexResponse response = result.response; final Translog.Location location = result.location; - 
processAfter(request.refresh(), indexShard, location); + processAfterWrite(request.refresh(), indexShard, location); return new Tuple<>(response, shardRequest.request); } @@ -178,16 +178,64 @@ public class TransportIndexAction extends TransportReplicationAction executeIndexRequestOnPrimary(IndexRequest request, IndexShard indexShard, MappingUpdatedAction mappingUpdatedAction) throws Throwable { + Engine.Index operation = prepareIndexOperationOnPrimary(request, indexShard); + Mapping update = operation.parsedDoc().dynamicMappingsUpdate(); + final ShardId shardId = indexShard.shardId(); + if (update != null) { + final String indexName = shardId.getIndex(); + mappingUpdatedAction.updateMappingOnMasterSynchronously(indexName, request.type(), update); + operation = prepareIndexOperationOnPrimary(request, indexShard); + update = operation.parsedDoc().dynamicMappingsUpdate(); + if (update != null) { + throw new RetryOnPrimaryException(shardId, + "Dynamics mappings are not available on the node that holds the primary yet"); + } + } + final boolean created = indexShard.index(operation); + + // update the version on request so it will happen on the replicas + final long version = operation.version(); + request.version(version); + request.versionType(request.versionType().versionTypeForReplicationAndRecovery()); + + assert request.versionType().validateVersionForWrites(request.version()); + + return new WriteResult<>(new IndexResponse(shardId.getIndex(), request.type(), request.id(), request.version(), created), operation.getTranslogLocation()); } } + diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 62802910cdb..3b4d860f31e 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ 
b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -25,9 +25,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionWriteResponse; import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.action.WriteConsistencyLevel; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexRequest.OpType; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.action.support.TransportActions; @@ -55,10 +52,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.VersionConflictEngineException; -import org.elasticsearch.index.mapper.Mapping; -import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog; @@ -1071,43 +1065,7 @@ public abstract class TransportReplicationAction executeIndexRequestOnPrimary(IndexRequest request, IndexShard indexShard) throws Throwable { - Engine.Index operation = prepareIndexOperationOnPrimary(request, indexShard); - Mapping update = operation.parsedDoc().dynamicMappingsUpdate(); - final ShardId shardId = indexShard.shardId(); - if (update != null) { - final String indexName = shardId.getIndex(); - mappingUpdatedAction.updateMappingOnMasterSynchronously(indexName, request.type(), update); - operation = prepareIndexOperationOnPrimary(request, indexShard); - update = operation.parsedDoc().dynamicMappingsUpdate(); - if (update != null) { - throw new RetryOnPrimaryException(shardId, 
- "Dynamics mappings are not available on the node that holds the primary yet"); - } - } - final boolean created = indexShard.index(operation); - - // update the version on request so it will happen on the replicas - final long version = operation.version(); - request.version(version); - request.versionType(request.versionType().versionTypeForReplicationAndRecovery()); - - assert request.versionType().validateVersionForWrites(request.version()); - - return new WriteResult(new IndexResponse(shardId.getIndex(), request.type(), request.id(), request.version(), created), operation.getTranslogLocation()); - } - - protected final void processAfter(boolean refresh, IndexShard indexShard, Translog.Location location) { + protected final void processAfterWrite(boolean refresh, IndexShard indexShard, Translog.Location location) { if (refresh) { try { indexShard.refresh("refresh_flag_index"); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java b/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java index 7a9effac9b8..8dd71e3fba5 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java @@ -215,7 +215,7 @@ public final class ShardRouting implements Streamable, ToXContent { public ShardRouting buildTargetRelocatingShard() { assert relocating(); return new ShardRouting(index, shardId, relocatingNodeId, currentNodeId, restoreSource, primary, ShardRoutingState.INITIALIZING, version, unassignedInfo, - AllocationId.newTargetRelocation(allocationId), true, expectedShardSize); + AllocationId.newTargetRelocation(allocationId), true, expectedShardSize); } /** @@ -538,29 +538,36 @@ public final class ShardRouting implements Streamable, ToXContent { return b; } + /** + * Returns true if this shard is a relocation target for another shard (i.e., was created with {@link #buildTargetRelocatingShard()} + */ + public boolean isRelocationTarget() 
{ + return state == ShardRoutingState.INITIALIZING && relocatingNodeId != null; + } + /** returns true if the routing is the relocation target of the given routing */ public boolean isRelocationTargetOf(ShardRouting other) { boolean b = this.allocationId != null && other.allocationId != null && this.state == ShardRoutingState.INITIALIZING && - this.allocationId.getId().equals(other.allocationId.getRelocationId()); + this.allocationId.getId().equals(other.allocationId.getRelocationId()); assert b == false || other.state == ShardRoutingState.RELOCATING : - "ShardRouting is a relocation target but the source shard state isn't relocating. This [" + this + "], other [" + other + "]"; + "ShardRouting is a relocation target but the source shard state isn't relocating. This [" + this + "], other [" + other + "]"; assert b == false || other.allocationId.getId().equals(this.allocationId.getRelocationId()) : - "ShardRouting is a relocation target but the source id isn't equal to source's allocationId.getRelocationId. This [" + this + "], other [" + other + "]"; + "ShardRouting is a relocation target but the source id isn't equal to source's allocationId.getRelocationId. This [" + this + "], other [" + other + "]"; assert b == false || other.currentNodeId().equals(this.relocatingNodeId) : - "ShardRouting is a relocation target but source current node id isn't equal to target relocating node. This [" + this + "], other [" + other + "]"; + "ShardRouting is a relocation target but source current node id isn't equal to target relocating node. This [" + this + "], other [" + other + "]"; assert b == false || this.currentNodeId().equals(other.relocatingNodeId) : - "ShardRouting is a relocation target but current node id isn't equal to source relocating node. This [" + this + "], other [" + other + "]"; + "ShardRouting is a relocation target but current node id isn't equal to source relocating node. 
This [" + this + "], other [" + other + "]"; assert b == false || isSameShard(other) : - "ShardRouting is a relocation target but both routings are not of the same shard. This [" + this + "], other [" + other + "]"; + "ShardRouting is a relocation target but both routings are not of the same shard. This [" + this + "], other [" + other + "]"; assert b == false || this.primary == other.primary : - "ShardRouting is a relocation target but primary flag is different. This [" + this + "], target [" + other + "]"; + "ShardRouting is a relocation target but primary flag is different. This [" + this + "], target [" + other + "]"; return b; } @@ -568,26 +575,26 @@ public final class ShardRouting implements Streamable, ToXContent { /** returns true if the routing is the relocation source for the given routing */ public boolean isRelocationSourceOf(ShardRouting other) { boolean b = this.allocationId != null && other.allocationId != null && other.state == ShardRoutingState.INITIALIZING && - other.allocationId.getId().equals(this.allocationId.getRelocationId()); + other.allocationId.getId().equals(this.allocationId.getRelocationId()); assert b == false || this.state == ShardRoutingState.RELOCATING : - "ShardRouting is a relocation source but shard state isn't relocating. This [" + this + "], other [" + other + "]"; + "ShardRouting is a relocation source but shard state isn't relocating. This [" + this + "], other [" + other + "]"; assert b == false || this.allocationId.getId().equals(other.allocationId.getRelocationId()) : - "ShardRouting is a relocation source but the allocation id isn't equal to other.allocationId.getRelocationId. This [" + this + "], other [" + other + "]"; + "ShardRouting is a relocation source but the allocation id isn't equal to other.allocationId.getRelocationId. 
This [" + this + "], other [" + other + "]"; assert b == false || this.currentNodeId().equals(other.relocatingNodeId) : - "ShardRouting is a relocation source but current node isn't equal to other's relocating node. This [" + this + "], other [" + other + "]"; + "ShardRouting is a relocation source but current node isn't equal to other's relocating node. This [" + this + "], other [" + other + "]"; assert b == false || other.currentNodeId().equals(this.relocatingNodeId) : - "ShardRouting is a relocation source but relocating node isn't equal to other's current node. This [" + this + "], other [" + other + "]"; + "ShardRouting is a relocation source but relocating node isn't equal to other's current node. This [" + this + "], other [" + other + "]"; assert b == false || isSameShard(other) : - "ShardRouting is a relocation source but both routings are not of the same shard. This [" + this + "], target [" + other + "]"; + "ShardRouting is a relocation source but both routings are not of the same shard. This [" + this + "], target [" + other + "]"; assert b == false || this.primary == other.primary : - "ShardRouting is a relocation source but primary flag is different. This [" + this + "], target [" + other + "]"; + "ShardRouting is a relocation source but primary flag is different. 
This [" + this + "], target [" + other + "]"; return b; } @@ -701,14 +708,14 @@ public final class ShardRouting implements Streamable, ToXContent { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject() - .field("state", state()) - .field("primary", primary()) - .field("node", currentNodeId()) - .field("relocating_node", relocatingNodeId()) - .field("shard", shardId().id()) - .field("index", shardId().index().name()) - .field("version", version); - if (expectedShardSize != UNAVAILABLE_EXPECTED_SHARD_SIZE){ + .field("state", state()) + .field("primary", primary()) + .field("node", currentNodeId()) + .field("relocating_node", relocatingNodeId()) + .field("shard", shardId().id()) + .field("index", shardId().index().name()) + .field("version", version); + if (expectedShardSize != UNAVAILABLE_EXPECTED_SHARD_SIZE) { builder.field("expected_shard_size_in_bytes", expectedShardSize); } if (restoreSource() != null) { diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 7804b50c390..d7e4294ee10 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -19,10 +19,7 @@ package org.elasticsearch.index.shard; -import org.apache.lucene.index.CheckIndex; -import org.apache.lucene.index.IndexCommit; -import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy; -import org.apache.lucene.index.SnapshotDeletionPolicy; +import org.apache.lucene.index.*; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.UsageTrackingQueryCachingPolicy; import org.apache.lucene.store.AlreadyClosedException; @@ -194,8 +191,10 @@ public class IndexShard extends AbstractIndexShardComponent { private final IndexSearcherWrapper searcherWrapper; private final TimeValue inactiveTime; - /** True if this shard is 
still indexing (recently) and false if we've been idle for long enough (as periodically checked by {@link - * IndexingMemoryController}). */ + /** + * True if this shard is still indexing (recently) and false if we've been idle for long enough (as periodically checked by {@link + * IndexingMemoryController}). + */ private final AtomicBoolean active = new AtomicBoolean(); public IndexShard(ShardId shardId, IndexSettings indexSettings, ShardPath path, Store store, IndexCache indexCache, @@ -220,10 +219,10 @@ public class IndexShard extends AbstractIndexShardComponent { this.indexCache = indexCache; this.indexingService = new ShardIndexingService(shardId, indexSettings); this.getService = new ShardGetService(indexSettings, this, mapperService); - this.termVectorsService = provider.getTermVectorsService(); + this.termVectorsService = provider.getTermVectorsService(); this.searchService = new ShardSearchStats(settings); this.shardWarmerService = new ShardIndexWarmerService(shardId, indexSettings); - this.indicesQueryCache = provider.getIndicesQueryCache(); + this.indicesQueryCache = provider.getIndicesQueryCache(); this.shardQueryCache = new ShardRequestCache(shardId, indexSettings); this.shardFieldData = new ShardFieldData(); this.indexFieldDataService = indexFieldDataService; @@ -238,7 +237,7 @@ public class IndexShard extends AbstractIndexShardComponent { this.checkIndexOnStartup = settings.get("index.shard.check_on_startup", "false"); this.translogConfig = new TranslogConfig(shardId, shardPath().resolveTranslog(), indexSettings, getFromSettings(logger, settings, Translog.Durabilty.REQUEST), - provider.getBigArrays(), threadPool); + provider.getBigArrays(), threadPool); final QueryCachingPolicy cachingPolicy; // the query cache is a node-level thing, however we want the most popular filters // to be computed on a per-shard basis @@ -394,7 +393,7 @@ public class IndexShard extends AbstractIndexShardComponent { * Marks the shard as recovering based on a recovery state, 
fails with exception is recovering is not allowed to be set. */ public IndexShardState markAsRecovering(String reason, RecoveryState recoveryState) throws IndexShardStartedException, - IndexShardRelocatedException, IndexShardRecoveringException, IndexShardClosedException { + IndexShardRelocatedException, IndexShardRecoveringException, IndexShardClosedException { synchronized (mutex) { if (state == IndexShardState.CLOSED) { throw new IndexShardClosedException(shardId); @@ -445,9 +444,21 @@ public class IndexShard extends AbstractIndexShardComponent { return previousState; } - public Engine.Index prepareIndex(SourceToParse source, long version, VersionType versionType, Engine.Operation.Origin origin) { + public Engine.Index prepareIndexOnPrimary(SourceToParse source, long version, VersionType versionType) { try { - return prepareIndex(docMapper(source.type()), source, version, versionType, origin); + if (shardRouting.primary() == false) { + throw new IllegalIndexShardStateException(shardId, state, "shard is not a primary"); + } + return prepareIndex(docMapper(source.type()), source, version, versionType, Engine.Operation.Origin.PRIMARY); + } catch (Throwable t) { + verifyNotClosed(t); + throw t; + } + } + + public Engine.Index prepareIndexOnReplica(SourceToParse source, long version, VersionType versionType) { + try { + return prepareIndex(docMapper(source.type()), source, version, versionType, Engine.Operation.Origin.REPLICA); } catch (Throwable t) { verifyNotClosed(t); throw t; @@ -486,12 +497,28 @@ public class IndexShard extends AbstractIndexShardComponent { return created; } - public Engine.Delete prepareDelete(String type, String id, long version, VersionType versionType, Engine.Operation.Origin origin) { - long startTime = System.nanoTime(); + public Engine.Delete prepareDeleteOnPrimary(String type, String id, long version, VersionType versionType) { + if (shardRouting.primary() == false) { + throw new IllegalIndexShardStateException(shardId, state, "shard is 
not a primary"); + } final DocumentMapper documentMapper = docMapper(type).getDocumentMapper(); - return new Engine.Delete(type, id, documentMapper.uidMapper().term(Uid.createUid(type, id)), version, versionType, origin, startTime, false); + return prepareDelete(type, id, documentMapper.uidMapper().term(Uid.createUid(type, id)), version, versionType, Engine.Operation.Origin.PRIMARY); } + public Engine.Delete prepareDeleteOnReplica(String type, String id, long version, VersionType versionType) { + if (shardRouting.primary() && shardRouting.isRelocationTarget() == false) { + throw new IllegalIndexShardStateException(shardId, state, "shard is not a replica"); + } + final DocumentMapper documentMapper = docMapper(type).getDocumentMapper(); + return prepareDelete(type, id, documentMapper.uidMapper().term(Uid.createUid(type, id)), version, versionType, Engine.Operation.Origin.REPLICA); + } + + static Engine.Delete prepareDelete(String type, String id, Term uid, long version, VersionType versionType, Engine.Operation.Origin origin) { + long startTime = System.nanoTime(); + return new Engine.Delete(type, id, uid, version, versionType, origin, startTime, false); + } + + public void delete(Engine.Delete delete) { ensureWriteAllowed(delete); markLastWrite(); @@ -533,11 +560,8 @@ public class IndexShard extends AbstractIndexShardComponent { } public DocsStats docStats() { - final Engine.Searcher searcher = acquireSearcher("doc_stats"); - try { + try (Engine.Searcher searcher = acquireSearcher("doc_stats")) { return new DocsStats(searcher.reader().numDocs(), searcher.reader().numDeletedDocs()); - } finally { - searcher.close(); } } @@ -652,7 +676,7 @@ public class IndexShard extends AbstractIndexShardComponent { logger.trace("force merge with {}", forceMerge); } getEngine().forceMerge(forceMerge.flush(), forceMerge.maxNumSegments(), - forceMerge.onlyExpungeDeletes(), false, false); + forceMerge.onlyExpungeDeletes(), false, false); } /** @@ -666,8 +690,8 @@ public class 
IndexShard extends AbstractIndexShardComponent { org.apache.lucene.util.Version previousVersion = minimumCompatibleVersion(); // we just want to upgrade the segments, not actually forge merge to a single segment getEngine().forceMerge(true, // we need to flush at the end to make sure the upgrade is durable - Integer.MAX_VALUE, // we just want to upgrade the segments, not actually optimize to a single segment - false, true, upgrade.upgradeOnlyAncientSegments()); + Integer.MAX_VALUE, // we just want to upgrade the segments, not actually optimize to a single segment + false, true, upgrade.upgradeOnlyAncientSegments()); org.apache.lucene.util.Version version = minimumCompatibleVersion(); if (logger.isTraceEnabled()) { logger.trace("upgraded segment {} from version {} to version {}", previousVersion, version); @@ -897,7 +921,7 @@ public class IndexShard extends AbstractIndexShardComponent { public boolean ignoreRecoveryAttempt() { IndexShardState state = state(); // one time volatile read return state == IndexShardState.POST_RECOVERY || state == IndexShardState.RECOVERING || state == IndexShardState.STARTED || - state == IndexShardState.RELOCATED || state == IndexShardState.CLOSED; + state == IndexShardState.RELOCATED || state == IndexShardState.CLOSED; } public void readAllowed() throws IllegalIndexShardStateException { @@ -977,8 +1001,10 @@ public class IndexShard extends AbstractIndexShardComponent { this.shardEventListener.delegates.add(onShardFailure); } - /** Change the indexing and translog buffer sizes. If {@code IndexWriter} is currently using more than - * the new buffering indexing size then we do a refresh to free up the heap. */ + /** + * Change the indexing and translog buffer sizes. If {@code IndexWriter} is currently using more than + * the new buffering indexing size then we do a refresh to free up the heap. 
+ */ public void updateBufferSize(ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) { final EngineConfig config = engineConfig; @@ -1000,7 +1026,7 @@ public class IndexShard extends AbstractIndexShardComponent { long iwBytesUsed = engine.indexWriterRAMBytesUsed(); String message = LoggerMessageFormat.format("updating index_buffer_size from [{}] to [{}]; IndexWriter now using [{}] bytes", - preValue, shardIndexingBufferSize, iwBytesUsed); + preValue, shardIndexingBufferSize, iwBytesUsed); if (iwBytesUsed > shardIndexingBufferSize.bytes()) { // our allowed buffer was changed to less than we are currently using; we ask IW to refresh @@ -1021,9 +1047,11 @@ public class IndexShard extends AbstractIndexShardComponent { engine.getTranslog().updateBuffer(shardTranslogBufferSize); } - /** Called by {@link IndexingMemoryController} to check whether more than {@code inactiveTimeNS} has passed since the last - * indexing operation, and become inactive (reducing indexing and translog buffers to tiny values) if so. This returns true - * if the shard is inactive. */ + /** + * Called by {@link IndexingMemoryController} to check whether more than {@code inactiveTimeNS} has passed since the last + * indexing operation, and become inactive (reducing indexing and translog buffers to tiny values) if so. This returns true + * if the shard is inactive. + */ public boolean checkIdle() { return checkIdle(inactiveTime.nanos()); } @@ -1042,8 +1070,10 @@ public class IndexShard extends AbstractIndexShardComponent { return active.get() == false; } - /** Returns {@code true} if this shard is active (has seen indexing ops in the last {@link - * IndexShard#INDEX_SHARD_INACTIVE_TIME_SETTING} (default 5 minutes), else {@code false}. */ + /** + * Returns {@code true} if this shard is active (has seen indexing ops in the last {@link + * IndexShard#INDEX_SHARD_INACTIVE_TIME_SETTING} (default 5 minutes), else {@code false}. 
+ */ public boolean getActive() { return active.get(); } @@ -1077,10 +1107,10 @@ public class IndexShard extends AbstractIndexShardComponent { return storeRecovery.recoverFromStore(this, shouldExist, localNode); } - public boolean restoreFromRepository(IndexShardRepository repository, DiscoveryNode locaNode) { + public boolean restoreFromRepository(IndexShardRepository repository, DiscoveryNode localNode) { assert shardRouting.primary() : "recover from store only makes sense if the shard is a primary shard"; StoreRecovery storeRecovery = new StoreRecovery(shardId, logger); - return storeRecovery.recoverFromRepository(this, repository, locaNode); + return storeRecovery.recoverFromRepository(this, repository, localNode); } /** @@ -1369,8 +1399,10 @@ public class IndexShard extends AbstractIndexShardComponent { return engine; } - /** NOTE: returns null if engine is not yet started (e.g. recovery phase 1, copying over index files, is still running), or if engine is - * closed. */ + /** + * NOTE: returns null if engine is not yet started (e.g. recovery phase 1, copying over index files, is still running), or if engine is + * closed. 
+ */ protected Engine getEngineOrNull() { return this.currentEngineReference.get(); } @@ -1427,9 +1459,9 @@ public class IndexShard extends AbstractIndexShardComponent { writeReason = "routing changed from " + currentRouting + " to " + newRouting; } else { logger.trace("skip writing shard state, has been written before; previous version: [" + - currentRouting.version() + "] current version [" + newRouting.version() + "]"); + currentRouting.version() + "] current version [" + newRouting.version() + "]"); assert currentRouting.version() <= newRouting.version() : "version should not go backwards for shardID: " + shardId + - " previous version: [" + currentRouting.version() + "] current version [" + newRouting.version() + "]"; + " previous version: [" + currentRouting.version() + "] current version [" + newRouting.version() + "]"; return; } final ShardStateMetaData newShardStateMetadata = new ShardStateMetaData(newRouting.version(), newRouting.primary(), getIndexUUID(), newRouting.allocationId()); @@ -1461,8 +1493,8 @@ public class IndexShard extends AbstractIndexShardComponent { }; final Engine.Warmer engineWarmer = (searcher, toLevel) -> warmer.warm(searcher, this, idxSettings, toLevel); return new EngineConfig(shardId, - threadPool, indexingService, indexSettings, engineWarmer, store, deletionPolicy, mergePolicyConfig.getMergePolicy(), mergeSchedulerConfig, - mapperService.indexAnalyzer(), similarityService.similarity(mapperService), codecService, shardEventListener, translogRecoveryPerformer, indexCache.query(), cachingPolicy, translogConfig, inactiveTime); + threadPool, indexingService, indexSettings, engineWarmer, store, deletionPolicy, mergePolicyConfig.getMergePolicy(), mergeSchedulerConfig, + mapperService.indexAnalyzer(), similarityService.similarity(mapperService), codecService, shardEventListener, translogRecoveryPerformer, indexCache.query(), cachingPolicy, translogConfig, inactiveTime); } private static class IndexShardOperationCounter extends 
AbstractRefCounted { @@ -1578,6 +1610,7 @@ public class IndexShard extends AbstractIndexShardComponent { /** * Simple struct encapsulating a shard failure + * * @see IndexShard#addShardFailureCallback(Callback) */ public static final class ShardFailure { @@ -1604,7 +1637,7 @@ public class IndexShard extends AbstractIndexShardComponent { }; private QueryShardContext newQueryShardContext() { - return new QueryShardContext(idxSettings, provider.getClient(), indexCache.bitsetFilterCache(), indexFieldDataService, mapperService, similarityService, provider.getScriptService(), provider.getIndicesQueriesRegistry()); + return new QueryShardContext(idxSettings, provider.getClient(), indexCache.bitsetFilterCache(), indexFieldDataService, mapperService, similarityService, provider.getScriptService(), provider.getIndicesQueriesRegistry()); } /** diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java index 146e80c7665..54e39cc227d 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java @@ -99,12 +99,18 @@ public class ShardRoutingTests extends ESTestCase { ShardRouting initializingShard0 = TestShardRouting.newShardRouting("test", 0, "node1", randomBoolean(), ShardRoutingState.INITIALIZING, 1); ShardRouting initializingShard1 = TestShardRouting.newShardRouting("test", 1, "node1", randomBoolean(), ShardRoutingState.INITIALIZING, 1); ShardRouting startedShard0 = new ShardRouting(initializingShard0); + assertFalse(startedShard0.isRelocationTarget()); startedShard0.moveToStarted(); + assertFalse(startedShard0.isRelocationTarget()); ShardRouting startedShard1 = new ShardRouting(initializingShard1); + assertFalse(startedShard1.isRelocationTarget()); startedShard1.moveToStarted(); + assertFalse(startedShard1.isRelocationTarget()); ShardRouting sourceShard0a = new 
ShardRouting(startedShard0); sourceShard0a.relocate("node2", -1); + assertFalse(sourceShard0a.isRelocationTarget()); ShardRouting targetShard0a = sourceShard0a.buildTargetRelocatingShard(); + assertTrue(targetShard0a.isRelocationTarget()); ShardRouting sourceShard0b = new ShardRouting(startedShard0); sourceShard0b.relocate("node2", -1); ShardRouting sourceShard1 = new ShardRouting(startedShard1); From 0809e4a65fb73a1be6e1b9c717f83b6b21b7e989 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Tue, 8 Dec 2015 09:36:10 +0100 Subject: [PATCH 16/57] typo fix --- .../org/elasticsearch/action/index/TransportIndexAction.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java b/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java index 06a417240d4..6fb7ab6e977 100644 --- a/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java @@ -222,7 +222,7 @@ public class TransportIndexAction extends TransportReplicationAction Date: Tue, 8 Dec 2015 11:58:28 +0100 Subject: [PATCH 17/57] Handle cancel exceptions on recovery target if the cancel comes from the source Today we only handle correctly if the `ExecutionCancelledException` comes from the local execution. Yet, this can also come from remove and should be handled identically. 
--- .../elasticsearch/indices/recovery/RecoveryTarget.java | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java index 2ccfbcb5420..32e644ab7b8 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java @@ -134,12 +134,12 @@ public class RecoveryTarget extends AbstractComponent implements IndexEventListe logger.trace("will retry recovery with id [{}] in [{}]", reason, recoveryStatus.recoveryId(), retryAfter); retryRecovery(recoveryStatus, retryAfter, currentRequest); } - + protected void retryRecovery(final RecoveryStatus recoveryStatus, final String reason, TimeValue retryAfter, final StartRecoveryRequest currentRequest) { logger.trace("will retry recovery with id [{}] in [{}] (reason [{}])", recoveryStatus.recoveryId(), retryAfter, reason); retryRecovery(recoveryStatus, retryAfter, currentRequest); } - + private void retryRecovery(final RecoveryStatus recoveryStatus, TimeValue retryAfter, final StartRecoveryRequest currentRequest) { try { recoveryStatus.resetRecovery(); @@ -208,11 +208,15 @@ public class RecoveryTarget extends AbstractComponent implements IndexEventListe } catch (CancellableThreads.ExecutionCancelledException e) { logger.trace("recovery cancelled", e); } catch (Throwable e) { - if (logger.isTraceEnabled()) { logger.trace("[{}][{}] Got exception on recovery", e, request.shardId().index().name(), request.shardId().id()); } Throwable cause = ExceptionsHelper.unwrapCause(e); + if (cause instanceof CancellableThreads.ExecutionCancelledException) { + // this can also come from the source wrapped in a RemoteTransportException + onGoingRecoveries.failRecovery(recoveryStatus.recoveryId(), new RecoveryFailedException(request, "source has canceled the recovery", cause), false); + 
return; + } if (cause instanceof RecoveryEngineException) { // unwrap an exception that was thrown as part of the recovery cause = cause.getCause(); From a6ba351fb2f2a7b4d8fa14bdc9127bc3a2b0f95a Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Tue, 8 Dec 2015 08:04:11 -0500 Subject: [PATCH 18/57] punch thru symlinks when loading plugins/modules this ensures the codebase URL matches the permission grant (see matching toRealPath in Security.java) in the case of symlinks or other shenanigans. this is best effort, if we really want to support symlinks in any way, we need e.g. qa or vagrant tests that configure a bunch of symlinks for things and ensure that in jenkins. this should be easier to do with gradle, as we can just create a symlink'd home if we want --- .../main/java/org/elasticsearch/plugins/PluginsService.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java index 4cd5f114616..5ebd43d5026 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -316,7 +316,8 @@ public class PluginsService extends AbstractComponent { // gather urls for jar files try (DirectoryStream jarStream = Files.newDirectoryStream(module, "*.jar")) { for (Path jar : jarStream) { - bundle.urls.add(jar.toUri().toURL()); + // normalize with toRealPath to get symlinks out of our hair + bundle.urls.add(jar.toRealPath().toUri().toURL()); } } bundles.add(bundle); @@ -357,7 +358,8 @@ public class PluginsService extends AbstractComponent { // a jvm plugin: gather urls for jar files try (DirectoryStream jarStream = Files.newDirectoryStream(plugin, "*.jar")) { for (Path jar : jarStream) { - urls.add(jar.toUri().toURL()); + // normalize with toRealPath to get symlinks out of our hair + urls.add(jar.toRealPath().toUri().toURL()); } } } From 
e0aa481bf5426aeb12076f6b1346357073374f05 Mon Sep 17 00:00:00 2001 From: Britta Weber Date: Tue, 8 Dec 2015 10:24:32 +0100 Subject: [PATCH 19/57] Merge pull request #15213 from brwe/copy-to-in-multi-fields-exception throw exception if a copy_to is within a multi field Copy to within multi field is ignored from 2.0 on, see #10802. Instead of just ignoring it, we should throw an exception if this is found in the mapping when a mapping is added. For already existing indices we should at least log a warning. We remove the copy_to in any case. related to #14946 --- .../elasticsearch/index/mapper/Mapper.java | 20 ++++ .../index/mapper/core/TypeParsers.java | 26 +++-- .../core/MultiFieldCopyToMapperTests.java | 105 ++++++++++++++++++ .../migration/migrate_2_0/mapping.asciidoc | 8 ++ .../attachments/AttachmentUnitTestCase.java | 9 +- .../DateAttachmentMapperTests.java | 3 +- .../attachments/EncryptedDocMapperTests.java | 11 +- ...anguageDetectionAttachmentMapperTests.java | 7 +- .../attachments/MetadataMapperTests.java | 4 +- .../MultifieldAttachmentMapperTests.java | 6 +- .../SimpleAttachmentMapperTests.java | 12 +- .../mapper/attachments/StandaloneRunner.java | 5 +- .../mapper/attachments/VariousDocTests.java | 8 +- .../elasticsearch/index}/MapperTestUtils.java | 37 +++--- 14 files changed, 209 insertions(+), 52 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/index/mapper/core/MultiFieldCopyToMapperTests.java rename {plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments => test-framework/src/main/java/org/elasticsearch/index}/MapperTestUtils.java (66%) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java b/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java index a8d0c0a706d..c9877410c30 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java @@ -134,6 +134,26 @@ public abstract class Mapper implements 
ToXContent, Iterable { public ParseFieldMatcher parseFieldMatcher() { return parseFieldMatcher; } + + public boolean isWithinMultiField() { return false; } + + protected Function typeParsers() { return typeParsers; } + + protected Function similarityLookupService() { return similarityLookupService; } + + public ParserContext createMultiFieldContext(ParserContext in) { + return new MultiFieldParserContext(in) { + @Override + public boolean isWithinMultiField() { return true; } + }; + } + + class MultiFieldParserContext extends ParserContext { + MultiFieldParserContext(ParserContext in) { + super(in.type(), in.analysisService, in.similarityLookupService(), in.mapperService(), in.typeParsers(), in.indexVersionCreated(), in.parseFieldMatcher()); + } + } + } Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java index c0e0cef0f0c..a3938a48a5b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java @@ -25,6 +25,7 @@ import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; +import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -184,11 +185,12 @@ public class TypeParsers { public static void parseField(FieldMapper.Builder builder, String name, Map fieldNode, Mapper.TypeParser.ParserContext parserContext) { NamedAnalyzer indexAnalyzer = builder.fieldType().indexAnalyzer(); NamedAnalyzer searchAnalyzer = builder.fieldType().searchAnalyzer(); + Version indexVersionCreated = 
parserContext.indexVersionCreated(); for (Iterator> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); final String propName = Strings.toUnderscoreCase(entry.getKey()); final Object propNode = entry.getValue(); - if (propName.equals("index_name") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { + if (propName.equals("index_name") && indexVersionCreated.before(Version.V_2_0_0_beta1)) { builder.indexName(propNode.toString()); iterator.remove(); } else if (propName.equals("store")) { @@ -239,7 +241,7 @@ public class TypeParsers { iterator.remove(); } else if (propName.equals("omit_term_freq_and_positions")) { final IndexOptions op = nodeBooleanValue(propNode) ? IndexOptions.DOCS : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS; - if (parserContext.indexVersionCreated().onOrAfter(Version.V_1_0_0_RC2)) { + if (indexVersionCreated.onOrAfter(Version.V_1_0_0_RC2)) { throw new ElasticsearchParseException("'omit_term_freq_and_positions' is not supported anymore - use ['index_options' : 'docs'] instead"); } // deprecated option for BW compat @@ -249,8 +251,8 @@ public class TypeParsers { builder.indexOptions(nodeIndexOptionValue(propNode)); iterator.remove(); } else if (propName.equals("analyzer") || // for backcompat, reading old indexes, remove for v3.0 - propName.equals("index_analyzer") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - + propName.equals("index_analyzer") && indexVersionCreated.before(Version.V_2_0_0_beta1)) { + NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString()); if (analyzer == null) { throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]"); @@ -267,10 +269,10 @@ public class TypeParsers { } else if (propName.equals("include_in_all")) { builder.includeInAll(nodeBooleanValue(propNode)); iterator.remove(); - } else if (propName.equals("postings_format") && 
parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { + } else if (propName.equals("postings_format") && indexVersionCreated.before(Version.V_2_0_0_beta1)) { // ignore for old indexes iterator.remove(); - } else if (propName.equals("doc_values_format") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { + } else if (propName.equals("doc_values_format") && indexVersionCreated.before(Version.V_2_0_0_beta1)) { // ignore for old indexes iterator.remove(); } else if (propName.equals("similarity")) { @@ -281,7 +283,16 @@ public class TypeParsers { builder.fieldDataSettings(settings); iterator.remove(); } else if (propName.equals("copy_to")) { - parseCopyFields(propNode, builder); + if (parserContext.isWithinMultiField()) { + if (indexVersionCreated.after(Version.V_2_1_0) || + (indexVersionCreated.after(Version.V_2_0_1) && indexVersionCreated.before(Version.V_2_1_0))) { + throw new MapperParsingException("copy_to in multi fields is not allowed. Found the copy_to in field [" + name + "] which is within a multi field."); + } else { + ESLoggerFactory.getLogger("mapping [" + parserContext.type() + "]").warn("Found a copy_to in field [" + name + "] which is within a multi field. 
This feature has been removed and the copy_to will be removed from the mapping."); + } + } else { + parseCopyFields(propNode, builder); + } iterator.remove(); } } @@ -298,6 +309,7 @@ public class TypeParsers { } public static boolean parseMultiField(FieldMapper.Builder builder, String name, Mapper.TypeParser.ParserContext parserContext, String propName, Object propNode) { + parserContext = parserContext.createMultiFieldContext(parserContext); if (propName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { builder.multiFieldPathType(parsePathType(name, propNode.toString())); return true; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/MultiFieldCopyToMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/MultiFieldCopyToMapperTests.java new file mode 100644 index 00000000000..821eaeb8365 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/MultiFieldCopyToMapperTests.java @@ -0,0 +1,105 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + + +package org.elasticsearch.index.mapper.core; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.MapperTestUtils; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.core.IsEqual.equalTo; + +public class MultiFieldCopyToMapperTests extends ESTestCase { + + public void testExceptionForCopyToInMultiFields() throws IOException { + XContentBuilder mapping = createMappinmgWithCopyToInMultiField(); + Tuple, List> versionsWithAndWithoutExpectedExceptions = versionsWithAndWithoutExpectedExceptions(); + + // first check that for newer versions we throw exception if copy_to is found withing multi field + Version indexVersion = randomFrom(versionsWithAndWithoutExpectedExceptions.v1()); + MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, indexVersion).build()); + try { + mapperService.parse("type", new CompressedXContent(mapping.string()), true); + fail("Parsing should throw an exception because the mapping contains a copy_to in a multi field"); + } catch (MapperParsingException e) { + assertThat(e.getMessage(), equalTo("copy_to in multi fields is not allowed. 
Found the copy_to in field [c] which is within a multi field.")); + } + + // now test that with an older version the pasring just works + indexVersion = randomFrom(versionsWithAndWithoutExpectedExceptions.v2()); + mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, indexVersion).build()); + DocumentMapper documentMapper = mapperService.parse("type", new CompressedXContent(mapping.string()), true); + assertFalse(documentMapper.mapping().toString().contains("copy_to")); + } + + private static XContentBuilder createMappinmgWithCopyToInMultiField() throws IOException { + XContentBuilder mapping = jsonBuilder(); + mapping.startObject() + .startObject("type") + .startObject("properties") + .startObject("a") + .field("type", "string") + .endObject() + .startObject("b") + .field("type", "string") + .startObject("fields") + .startObject("c") + .field("type", "string") + .field("copy_to", "a") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + return mapping; + } + + // returs a tuple where + // v1 is a list of versions for which we expect an excpetion when a copy_to in multi fields is found and + // v2 is older versions where we throw no exception and we just log a warning + private static Tuple, List> versionsWithAndWithoutExpectedExceptions() { + List versionsWithException = new ArrayList<>(); + List versionsWithoutException = new ArrayList<>(); + for (Version version : VersionUtils.allVersions()) { + if (version.after(Version.V_2_1_0) || + (version.after(Version.V_2_0_1) && version.before(Version.V_2_1_0))) { + versionsWithException.add(version); + } else { + versionsWithoutException.add(version); + } + } + return new Tuple<>(versionsWithException, versionsWithoutException); + } +} diff --git a/docs/reference/migration/migrate_2_0/mapping.asciidoc b/docs/reference/migration/migrate_2_0/mapping.asciidoc index 33ef9ebc5e5..09170a2e718 100644 --- 
a/docs/reference/migration/migrate_2_0/mapping.asciidoc +++ b/docs/reference/migration/migrate_2_0/mapping.asciidoc @@ -429,3 +429,11 @@ to use the old default of 0. This was done to prevent phrase queries from matching across different values of the same term unexpectedly. Specifically, 100 was chosen to cause phrase queries with slops up to 99 to match only within a single value of a field. + +==== copy_to and multi fields + +A <> within a <> is ignored from version 2.0 on. With any version after +2.1 or 2.0.1 creating a mapping that has a copy_to within a multi field will result +in an exception. + + diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/AttachmentUnitTestCase.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/AttachmentUnitTestCase.java index 9378f2d71f9..9b7d8afe381 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/AttachmentUnitTestCase.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/AttachmentUnitTestCase.java @@ -22,13 +22,20 @@ package org.elasticsearch.mapper.attachments; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.test.ESTestCase; import org.junit.Before; public class AttachmentUnitTestCase extends ESTestCase { protected Settings testSettings; - + + protected static IndicesModule getIndicesModuleWithRegisteredAttachmentMapper() { + IndicesModule indicesModule = new IndicesModule(); + indicesModule.registerMapper(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser()); + return indicesModule; + } + @Before public void createSettings() throws Exception { testSettings = Settings.builder() diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/DateAttachmentMapperTests.java 
b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/DateAttachmentMapperTests.java index 858ed8a767f..f93785ed14a 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/DateAttachmentMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/DateAttachmentMapperTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.mapper.attachments; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.core.StringFieldMapper; @@ -37,7 +38,7 @@ public class DateAttachmentMapperTests extends AttachmentUnitTestCase { @Before public void setupMapperParser() throws Exception { - mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY).documentMapperParser(); + mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); } public void testSimpleMappings() throws Exception { diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java index e086d9ba5c4..10e82e24c84 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/EncryptedDocMapperTests.java @@ -21,11 +21,11 @@ package org.elasticsearch.mapper.attachments; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.mapper.DocumentMapper; import 
org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.mapper.attachments.AttachmentMapper; import java.io.IOException; @@ -42,7 +42,7 @@ import static org.hamcrest.Matchers.*; public class EncryptedDocMapperTests extends AttachmentUnitTestCase { public void testMultipleDocsEncryptedLast() throws IOException { - DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY).documentMapperParser(); + DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/encrypted/test-mapping.json"); DocumentMapper docMapper = mapperParser.parse(mapping); @@ -72,7 +72,7 @@ public class EncryptedDocMapperTests extends AttachmentUnitTestCase { } public void testMultipleDocsEncryptedFirst() throws IOException { - DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY).documentMapperParser(); + DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/encrypted/test-mapping.json"); DocumentMapper docMapper = mapperParser.parse(mapping); byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/htmlWithValidDateMeta.html"); @@ -103,9 +103,8 @@ public class EncryptedDocMapperTests extends AttachmentUnitTestCase { public void testMultipleDocsEncryptedNotIgnoringErrors() throws IOException { try { DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), - Settings.builder() - 
.put("index.mapping.attachment.ignore_errors", false) - .build()).documentMapperParser(); + Settings.builder().put("index.mapping.attachment.ignore_errors", false).build(), + getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/encrypted/test-mapping.json"); DocumentMapper docMapper = mapperParser.parse(mapping); diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/LanguageDetectionAttachmentMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/LanguageDetectionAttachmentMapperTests.java index b2d361fe847..868ecb3ae55 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/LanguageDetectionAttachmentMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/LanguageDetectionAttachmentMapperTests.java @@ -21,11 +21,11 @@ package org.elasticsearch.mapper.attachments; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.StringFieldMapper; -import org.elasticsearch.mapper.attachments.AttachmentMapper; import org.junit.Before; import java.io.IOException; @@ -50,9 +50,8 @@ public class LanguageDetectionAttachmentMapperTests extends AttachmentUnitTestCa public void setupMapperParser(boolean langDetect) throws IOException { DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), - Settings.settingsBuilder() - .put("index.mapping.attachment.detect_language", langDetect) - .build()).documentMapperParser(); + 
Settings.settingsBuilder().put("index.mapping.attachment.detect_language", langDetect).build(), + getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/language/language-mapping.json"); docMapper = mapperParser.parse(mapping); diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java index cf2a130829f..acf0163acd9 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MetadataMapperTests.java @@ -21,11 +21,11 @@ package org.elasticsearch.mapper.attachments; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.mapper.attachments.AttachmentMapper; import java.io.IOException; @@ -44,7 +44,7 @@ public class MetadataMapperTests extends AttachmentUnitTestCase { .put(this.testSettings) .put(otherSettings) .build(); - DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), settings).documentMapperParser(); + DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), settings, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/metadata/test-mapping.json"); DocumentMapper docMapper = mapperParser.parse(mapping); diff --git 
a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java index 4f070bd0dd1..40593ddb1bb 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java @@ -22,13 +22,13 @@ package org.elasticsearch.mapper.attachments; import org.elasticsearch.common.Base64; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.core.DateFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper; -import org.elasticsearch.mapper.attachments.AttachmentMapper; import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; import org.junit.Before; @@ -48,7 +48,7 @@ public class MultifieldAttachmentMapperTests extends AttachmentUnitTestCase { @Before public void setupMapperParser() throws Exception { - mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY).documentMapperParser(); + mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); } @@ -91,7 +91,7 @@ public class MultifieldAttachmentMapperTests extends AttachmentUnitTestCase { String bytes = Base64.encodeBytes(originalText.getBytes(StandardCharsets.ISO_8859_1)); threadPool = new ThreadPool("testing-only"); - MapperService mapperService = 
MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY); + MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/multifield/multifield-mapping.json"); diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java index 0023fc44e24..01e87dc1430 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperService; @@ -42,7 +43,7 @@ import static org.hamcrest.Matchers.*; public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase { public void testSimpleMappings() throws Exception { - DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY).documentMapperParser(); + DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/simple/test-mapping.json"); DocumentMapper docMapper = mapperParser.parse(mapping); byte[] html 
= copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/testXHTML.html"); @@ -69,9 +70,8 @@ public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase { public void testContentBackcompat() throws Exception { DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), - Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id) - .build()).documentMapperParser(); + Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(), + getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/simple/test-mapping.json"); DocumentMapper docMapper = mapperParser.parse(mapping); byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/testXHTML.html"); @@ -86,7 +86,7 @@ public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase { * test for https://github.com/elastic/elasticsearch-mapper-attachments/issues/179 */ public void testSimpleMappingsWithAllFields() throws Exception { - DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY).documentMapperParser(); + DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/simple/test-mapping-all-fields.json"); DocumentMapper docMapper = mapperParser.parse(mapping); byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/testXHTML.html"); @@ -131,7 +131,7 @@ public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase { .endObject(); byte[] mapping = mappingBuilder.bytes().toBytes(); - MapperService mapperService = 
MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY); + MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()); DocumentMapper docMapper = mapperService.parse("mail", new CompressedXContent(mapping), true); // this should not throw an exception mapperService.parse("mail", new CompressedXContent(docMapper.mapping().toString()), true); diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java index f6264337613..fcd430d0fbc 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/StandaloneRunner.java @@ -30,10 +30,10 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.env.Environment; +import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.mapper.attachments.AttachmentMapper; import java.io.FileNotFoundException; import java.io.IOException; @@ -46,6 +46,7 @@ import static org.elasticsearch.common.cli.CliToolConfig.Builder.cmd; import static org.elasticsearch.common.cli.CliToolConfig.Builder.option; import static org.elasticsearch.common.io.Streams.copy; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.mapper.attachments.AttachmentUnitTestCase.getIndicesModuleWithRegisteredAttachmentMapper; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; /** @@ -88,7 +89,7 @@ 
public class StandaloneRunner extends CliTool { this.size = size; this.url = url; this.base64text = base64text; - DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(PathUtils.get("."), Settings.EMPTY).documentMapperParser(); // use CWD b/c it won't be used + DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(PathUtils.get("."), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); // use CWD b/c it won't be used String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/standalone/standalone-mapping.json"); docMapper = mapperParser.parse(mapping); diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java index c2569fd3663..5341e038cff 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/VariousDocTests.java @@ -23,10 +23,10 @@ import org.apache.tika.io.IOUtils; import org.apache.tika.metadata.Metadata; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.mapper.attachments.AttachmentMapper; import org.junit.Before; import java.io.IOException; @@ -48,7 +48,7 @@ public class VariousDocTests extends AttachmentUnitTestCase { @Before public void createMapper() throws IOException { - DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY).documentMapperParser(); + DocumentMapperParser mapperParser = 
MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()).documentMapperParser(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/various-doc/test-mapping.json"); docMapper = mapperParser.parse(mapping); @@ -93,7 +93,7 @@ public class VariousDocTests extends AttachmentUnitTestCase { assertParseable("text-in-english.txt"); testMapper("text-in-english.txt", false); } - + /** * Test for .epub */ @@ -129,7 +129,7 @@ public class VariousDocTests extends AttachmentUnitTestCase { protected void assertParseable(String filename) throws Exception { try (InputStream is = VariousDocTests.class.getResourceAsStream("/org/elasticsearch/index/mapper/attachment/test/sample-files/" + filename)) { byte bytes[] = IOUtils.toByteArray(is); - String parsedContent = TikaImpl.parse(bytes, new Metadata(), -1); + String parsedContent = TikaImpl.parse(bytes, new Metadata(), -1); assertThat(parsedContent, not(isEmptyOrNullString())); logger.debug("extracted content: {}", parsedContent); } diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperTestUtils.java b/test-framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java similarity index 66% rename from plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperTestUtils.java rename to test-framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java index 1513f7ba6e0..8b529f9fc8f 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperTestUtils.java +++ b/test-framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java @@ -17,14 +17,12 @@ * under the License. 
*/ -package org.elasticsearch.mapper.attachments; +package org.elasticsearch.index; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.mapper.MapperService; @@ -37,22 +35,29 @@ import java.io.IOException; import java.nio.file.Path; import java.util.Collections; -class MapperTestUtils { + +public class MapperTestUtils { public static MapperService newMapperService(Path tempDir, Settings indexSettings) throws IOException { - Settings nodeSettings = Settings.builder() - .put("path.home", tempDir) - .build(); - indexSettings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(indexSettings) - .build(); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("test"), indexSettings); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(nodeSettings)).build(idxSettings); - SimilarityService similarityService = new SimilarityService(idxSettings, Collections.emptyMap()); IndicesModule indicesModule = new IndicesModule(); - indicesModule.registerMapper(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser()); + return newMapperService(tempDir, indexSettings, indicesModule); + } + + public static MapperService newMapperService(Path tempDir, Settings settings, IndicesModule indicesModule) throws IOException { + Settings.Builder settingsBuilder = Settings.builder() + .put("path.home", tempDir) + .put(settings); + if (settings.get(IndexMetaData.SETTING_VERSION_CREATED) == null) { + settingsBuilder.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT); + } + Settings finalSettings = settingsBuilder.build(); MapperRegistry 
mapperRegistry = indicesModule.getMapperRegistry(); - return new MapperService(idxSettings, analysisService, similarityService, mapperRegistry); + IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(new Index("test"), finalSettings); + AnalysisService analysisService = new AnalysisRegistry(null, new Environment(finalSettings)).build(indexSettings); + SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap()); + return new MapperService(indexSettings, + analysisService, + similarityService, + mapperRegistry); } } From 73a3c326c94e8f75237dd5fb6e8f2a4961ea00de Mon Sep 17 00:00:00 2001 From: Myll Date: Tue, 28 Apr 2015 20:50:22 -0400 Subject: [PATCH 20/57] _cat APIs: remove space at the end of a line Fixes #9464 --- .../rest/action/support/RestTable.java | 10 ++- .../test/cat.aliases/10_basic.yaml | 4 +- .../test/cat.allocation/10_basic.yaml | 4 +- .../test/cat.count/10_basic.yaml | 13 ++-- .../test/cat.fielddata/10_basic.yaml | 12 ++-- .../test/cat.health/10_basic.yaml | 2 +- .../test/cat.nodes/10_basic.yaml | 18 +++--- .../test/cat.recovery/10_basic.yaml | 2 +- .../test/cat.shards/10_basic.yaml | 2 +- .../test/cat.thread_pool/10_basic.yaml | 64 +++++++++---------- 10 files changed, 68 insertions(+), 63 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/rest/action/support/RestTable.java b/core/src/main/java/org/elasticsearch/rest/action/support/RestTable.java index e1c62049843..549624059fa 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/support/RestTable.java +++ b/core/src/main/java/org/elasticsearch/rest/action/support/RestTable.java @@ -75,20 +75,24 @@ public class RestTable { BytesStreamOutput bytesOut = channel.bytesOutput(); UTF8StreamWriter out = new UTF8StreamWriter().setOutput(bytesOut); + int lastHeader = headers.size() - 1; if (verbose) { for (int col = 0; col < headers.size(); col++) { DisplayHeader header = headers.get(col); pad(new Table.Cell(header.display, 
table.findHeaderByName(header.name)), width[col], request, out); - out.append(" "); + if (col != lastHeader) { + out.append(" "); + } } out.append("\n"); } - for (int row = 0; row < table.getRows().size(); row++) { for (int col = 0; col < headers.size(); col++) { DisplayHeader header = headers.get(col); pad(table.getAsMap().get(header.name).get(row), width[col], request, out); - out.append(" "); + if (col != lastHeader) { + out.append(" "); + } } out.append("\n"); } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/10_basic.yaml index 640d77e0183..73285eaa656 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/10_basic.yaml @@ -148,7 +148,7 @@ index \s+ filter \s+ routing.index \s+ - routing.search \s+ + routing.search \n test_1 \s+ test \s+ @@ -186,5 +186,5 @@ $body: | /^ index \s+ alias \s+ \n - test \s+ test_1 \s+ \n + test \s+ test_1 \n $/ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml index be25839485d..c0a5a079e5d 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml @@ -71,7 +71,7 @@ ( \s* #allow leading spaces to account for right-justified text \d+ \s+ - UNASSIGNED \s+ + UNASSIGNED \n )? $/ @@ -134,7 +134,7 @@ ( \s* #allow leading spaces to account for right-justified text \d+ \s+ - UNASSIGNED \s+ + UNASSIGNED \n )? 
$/ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.count/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.count/10_basic.yaml index 1a62ab063d9..30199466616 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.count/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.count/10_basic.yaml @@ -19,7 +19,7 @@ - match: $body: | /# epoch timestamp count - ^ \d+ \s \d{2}:\d{2}:\d{2} \s 0 \s+ \n $/ + ^ \d+ \s \d{2}:\d{2}:\d{2} \s 0 \n $/ - do: index: @@ -35,7 +35,7 @@ - match: $body: | /# epoch timestamp count - ^ \d+ \s \d{2}:\d{2}:\d{2} \s 1 \s+ \n $/ + ^ \d+ \s \d{2}:\d{2}:\d{2} \s 1 \n $/ - do: index: @@ -52,7 +52,7 @@ - match: $body: | /# count - ^ 2 \s+ \n $/ + ^ 2 \n $/ - do: @@ -62,7 +62,7 @@ - match: $body: | /# epoch timestamp count - ^ \d+ \s \d{2}:\d{2}:\d{2} \s 1 \s+ \n $/ + ^ \d+ \s \d{2}:\d{2}:\d{2} \s 1 \n $/ - do: cat.count: @@ -71,5 +71,6 @@ - match: $body: | - /^ epoch \s+ timestamp \s+ count \s+ \n - \d+ \s+ \d{2}:\d{2}:\d{2} \s+ \d+ \s+ \n $/ + /^ epoch \s+ timestamp \s+ count \n + \d+ \s+ \d{2}:\d{2}:\d{2} \s+ \d+ \s+ \n $/ + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.fielddata/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.fielddata/10_basic.yaml index bc362fae58c..dfc580da1dd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.fielddata/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.fielddata/10_basic.yaml @@ -38,8 +38,8 @@ - match: $body: | - /^ total \s \n - (\s*\d+(\.\d+)?[gmk]?b \s \n)+ $/ + /^ total \n + (\s*\d+(\.\d+)?[gmk]?b \n)+ $/ - do: cat.fielddata: @@ -48,8 +48,8 @@ - match: $body: | - /^ total \s+ foo \s+ \n - (\s*\d+(\.\d+)?[gmk]?b \s+ \d+(\.\d+)?[gmk]?b \s \n)+ \s*$/ + /^ total \s+ foo \n + (\s*\d+(\.\d+)?[gmk]?b \s+ \d+(\.\d+)?[gmk]?b \n)+ $/ - do: cat.fielddata: @@ -59,5 +59,5 @@ - match: $body: | - /^ total \s+ foo \s+ \n - (\s*\d+(\.\d+)?[gmk]?b \s+ 
\d+(\.\d+)?[gmk]?b \s \n)+ \s*$/ + /^ total \s+ foo \n + (\s*\d+(\.\d+)?[gmk]?b \s+ \d+(\.\d+)?[gmk]?b \n)+ $/ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.health/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.health/10_basic.yaml index 9bfde46a371..0692df28a08 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.health/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.health/10_basic.yaml @@ -46,7 +46,7 @@ \d+ \s+ # unassign \d+ \s+ # pending_tasks (-|\d+[.]\d+ms|s) \s+ # max task waiting time - \d+\.\d+% \s+ # active shards percent + \d+\.\d+% # active shards percent \n )+ $/ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yaml index 5a4cbc89ad1..77aaecf51c2 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yaml @@ -7,7 +7,7 @@ - match: $body: | / #host ip heap.percent ram.percent cpu load node.role master name - ^ (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ \d* \s+ (-)?\d*(\.\d+)? \s+ [-dc] \s+ [-*mx] \s+ (\S+\s?)+ \s+ \n)+ $/ + ^ (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ \d* \s+ (-)?\d*(\.\d+)? \s+ [-dc] \s+ [-*mx] \s+ (\S+\s?)+ \n)+ $/ - do: cat.nodes: @@ -15,8 +15,8 @@ - match: $body: | - /^ host \s+ ip \s+ heap\.percent \s+ ram\.percent \s+ cpu \s+ load \s+ node\.role \s+ master \s+ name \s+ \n - (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ \d* \s+ (-)?\d*(\.\d+)? \s+ [-dc] \s+ [-*mx] \s+ (\S+\s?)+ \s+ \n)+ $/ + /^ host \s+ ip \s+ heap\.percent \s+ ram\.percent \s+ cpu \s+ load \s+ node\.role \s+ master \s+ name \s+ \n + (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ \d* \s+ (-)?\d*(\.\d+)? 
\s+ [-dc] \s+ [-*mx] \s+ (\S+\s?)+ \n)+ $/ - do: cat.nodes: @@ -25,8 +25,8 @@ - match: $body: | - /^ heap\.current \s+ heap\.percent \s+ heap\.max \s+ \n - (\s+ \d+(\.\d+)?[ptgmk]?b \s+ \d+ \s+ \d+(\.\d+)?[ptgmk]?b \s+ \n)+ $/ + /^ heap\.current \s+ heap\.percent \s+ heap\.max \n + (\s+ \d+(\.\d+)?[ptgmk]?b \s+ \d+ \s+ \d+(\.\d+)?[ptgmk]?b \n)+ $/ - do: cat.nodes: @@ -35,8 +35,8 @@ - match: $body: | - /^ heap\.current \s+ heap\.percent \s+ heap\.max \s+ \n - (\s+ \d+(\.\d+)?[ptgmk]?b \s+ \d+ \s+ \d+(\.\d+)?[ptgmk]?b \s+ \n)+ $/ + /^ heap\.current \s+ heap\.percent \s+ heap\.max \n + (\s+ \d+(\.\d+)?[ptgmk]?b \s+ \d+ \s+ \d+(\.\d+)?[ptgmk]?b \n)+ $/ - do: cat.nodes: @@ -46,5 +46,5 @@ - match: # Windows reports -1 for the file descriptor counts. $body: | - /^ file_desc\.current \s+ file_desc\.percent \s+ file_desc\.max \s+ \n - (\s+ (-1|\d+) \s+ \d+ \s+ (-1|\d+) \s+ \n)+ $/ + /^ file_desc\.current \s+ file_desc\.percent \s+ file_desc\.max \n + (\s+ (-1|\d+) \s+ \d+ \s+ (-1|\d+) \n)+ $/ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml index c34437ce064..b081aa4d8cc 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml @@ -42,7 +42,7 @@ \d+ \s+ # total_bytes \d+ \s+ # translog -?\d+\.\d+% \s+ # translog_percent - -?\d+ \s+ # total_translog + -?\d+ # total_translog \n )+ $/ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml index 5a139234523..f264928c21b 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml @@ -108,7 +108,7 @@ h: index,state,sync_id - match: $body: | - 
/^(sync_id_test\s+STARTED\s+[A-Za-z0-9_\-]{20}\s+\n){5}$/ + /^(sync_id_test\s+STARTED\s+[A-Za-z0-9_\-]{20}\n){5}$/ - do: indices.delete: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yaml index d362c15d933..8d59e7c139c 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yaml @@ -7,7 +7,7 @@ - match: $body: | / #host ip bulk.active bulk.queue bulk.rejected index.active index.queue index.rejected search.active search.queue search.rejected - ^ (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \n)+ $/ + ^ (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \n)+ $/ - do: cat.thread_pool: @@ -15,8 +15,8 @@ - match: $body: | - /^ host \s+ ip \s+ bulk.active \s+ bulk.queue \s+ bulk.rejected \s+ index.active \s+ index.queue \s+ index.rejected \s+ search.active \s+ search.queue \s+ search.rejected \s+ \n - (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \n)+ $/ + /^ host \s+ ip \s+ bulk.active \s+ bulk.queue \s+ bulk.rejected \s+ index.active \s+ index.queue \s+ index.rejected \s+ search.active \s+ search.queue \s+ search.rejected \n + (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \n)+ $/ - do: cat.thread_pool: @@ -25,7 +25,7 @@ - match: $body: | / #pid id host ip port - ^ (\d+ \s+ \S{4} \s+ \S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ (\d+|-) \s+ \n)+ $/ + ^ (\d+ \s+ \S{4} \s+ \S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ (\d+|-) \n)+ $/ - do: cat.thread_pool: @@ -35,8 +35,8 @@ - match: $body: | - /^ id \s+ ba \s+ fa \s+ gea \s+ ga \s+ ia \s+ maa \s+ fma \s+ pa \s+ \n - (\S+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ 
\d+ \s+ \d+ \s+ \n)+ $/ + /^ id \s+ ba \s+ fa \s+ gea \s+ ga \s+ ia \s+ maa \s+ fma \s+ pa \n + (\S+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d+ \n)+ $/ - do: cat.thread_pool: @@ -45,8 +45,8 @@ - match: $body: | - /^ id \s+ bulk.type \s+ bulk.active \s+ bulk.size \s+ bulk.queue \s+ bulk.queueSize \s+ bulk.rejected \s+ bulk.largest \s+ bulk.completed \s+ bulk.min \s+ bulk.max \s+ bulk.keepAlive \s+ \n - (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/ + /^ id \s+ bulk.type \s+ bulk.active \s+ bulk.size \s+ bulk.queue \s+ bulk.queueSize \s+ bulk.rejected \s+ bulk.largest \s+ bulk.completed \s+ bulk.min \s+ bulk.max \s+ bulk.keepAlive \n + (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/ - do: cat.thread_pool: @@ -55,8 +55,8 @@ - match: $body: | - /^ id \s+ flush.type \s+ flush.active \s+ flush.size \s+ flush.queue \s+ flush.queueSize \s+ flush.rejected \s+ flush.largest \s+ flush.completed \s+ flush.min \s+ flush.max \s+ flush.keepAlive \s+ \n - (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/ + /^ id \s+ flush.type \s+ flush.active \s+ flush.size \s+ flush.queue \s+ flush.queueSize \s+ flush.rejected \s+ flush.largest \s+ flush.completed \s+ flush.min \s+ flush.max \s+ flush.keepAlive \n + (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/ - do: cat.thread_pool: @@ -65,8 +65,8 @@ - match: $body: | - /^ id \s+ generic.type \s+ generic.active \s+ generic.size \s+ generic.queue \s+ generic.queueSize \s+ generic.rejected \s+ generic.largest \s+ generic.completed \s+ generic.min \s+ generic.max \s+ generic.keepAlive \s+ \n - (\S+ \s+ (cached|fixed|scaling)? 
\s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/ + /^ id \s+ generic.type \s+ generic.active \s+ generic.size \s+ generic.queue \s+ generic.queueSize \s+ generic.rejected \s+ generic.largest \s+ generic.completed \s+ generic.min \s+ generic.max \s+ generic.keepAlive \n + (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/ - do: cat.thread_pool: @@ -75,8 +75,8 @@ - match: $body: | - /^ id \s+ get.type \s+ get.active \s+ get.size \s+ get.queue \s+ get.queueSize \s+ get.rejected \s+ get.largest \s+ get.completed \s+ get.min \s+ get.max \s+ get.keepAlive \s+ \n - (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/ + /^ id \s+ get.type \s+ get.active \s+ get.size \s+ get.queue \s+ get.queueSize \s+ get.rejected \s+ get.largest \s+ get.completed \s+ get.min \s+ get.max \s+ get.keepAlive \n + (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/ - do: cat.thread_pool: @@ -85,8 +85,8 @@ - match: $body: | - /^ id \s+ index.type \s+ index.active \s+ index.size \s+ index.queue \s+ index.queueSize \s+ index.rejected \s+ index.largest \s+ index.completed \s+ index.min \s+ index.max \s+ index.keepAlive \s+ \n - (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/ + /^ id \s+ index.type \s+ index.active \s+ index.size \s+ index.queue \s+ index.queueSize \s+ index.rejected \s+ index.largest \s+ index.completed \s+ index.min \s+ index.max \s+ index.keepAlive \n + (\S+ \s+ (cached|fixed|scaling)? 
\s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/ - do: cat.thread_pool: @@ -95,8 +95,8 @@ - match: $body: | - /^ id \s+ management.type \s+ management.active \s+ management.size \s+ management.queue \s+ management.queueSize \s+ management.rejected \s+ management.largest \s+ management.completed \s+ management.min \s+ management.max \s+ management.keepAlive \s+ \n - (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/ + /^ id \s+ management.type \s+ management.active \s+ management.size \s+ management.queue \s+ management.queueSize \s+ management.rejected \s+ management.largest \s+ management.completed \s+ management.min \s+ management.max \s+ management.keepAlive \n + (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/ - do: cat.thread_pool: @@ -105,8 +105,8 @@ - match: $body: | - /^ id \s+ force_merge.type \s+ force_merge.active \s+ force_merge.size \s+ force_merge.queue \s+ force_merge.queueSize \s+ force_merge.rejected \s+ force_merge.largest \s+ force_merge.completed \s+ force_merge.min \s+ force_merge.max \s+ force_merge.keepAlive \s+ \n - (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/ + /^ id \s+ force_merge.type \s+ force_merge.active \s+ force_merge.size \s+ force_merge.queue \s+ force_merge.queueSize \s+ force_merge.rejected \s+ force_merge.largest \s+ force_merge.completed \s+ force_merge.min \s+ force_merge.max \s+ force_merge.keepAlive \n + (\S+ \s+ (cached|fixed|scaling)? 
\s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/ - do: cat.thread_pool: @@ -115,8 +115,8 @@ - match: $body: | - /^ id \s+ percolate.type \s+ percolate.active \s+ percolate.size \s+ percolate.queue \s+ percolate.queueSize \s+ percolate.rejected \s+ percolate.largest \s+ percolate.completed \s+ percolate.min \s+ percolate.max \s+ percolate.keepAlive \s+ \n - (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/ + /^ id \s+ percolate.type \s+ percolate.active \s+ percolate.size \s+ percolate.queue \s+ percolate.queueSize \s+ percolate.rejected \s+ percolate.largest \s+ percolate.completed \s+ percolate.min \s+ percolate.max \s+ percolate.keepAlive \n + (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/ - do: cat.thread_pool: @@ -125,8 +125,8 @@ - match: $body: | - /^ id \s+ refresh.type \s+ refresh.active \s+ refresh.size \s+ refresh.queue \s+ refresh.queueSize \s+ refresh.rejected \s+ refresh.largest \s+ refresh.completed \s+ refresh.min \s+ refresh.max \s+ refresh.keepAlive \s+ \n - (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/ + /^ id \s+ refresh.type \s+ refresh.active \s+ refresh.size \s+ refresh.queue \s+ refresh.queueSize \s+ refresh.rejected \s+ refresh.largest \s+ refresh.completed \s+ refresh.min \s+ refresh.max \s+ refresh.keepAlive \n + (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/ - do: cat.thread_pool: @@ -135,8 +135,8 @@ - match: $body: | - /^ id \s+ search.type \s+ search.active \s+ search.size \s+ search.queue \s+ search.queueSize \s+ search.rejected \s+ search.largest \s+ search.completed \s+ search.min \s+ search.max \s+ search.keepAlive \s+ \n - (\S+ \s+ (cached|fixed|scaling)? 
\s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/ + /^ id \s+ search.type \s+ search.active \s+ search.size \s+ search.queue \s+ search.queueSize \s+ search.rejected \s+ search.largest \s+ search.completed \s+ search.min \s+ search.max \s+ search.keepAlive \n + (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/ - do: cat.thread_pool: @@ -145,8 +145,8 @@ - match: $body: | - /^ id \s+ snapshot.type \s+ snapshot.active \s+ snapshot.size \s+ snapshot.queue \s+ snapshot.queueSize \s+ snapshot.rejected \s+ snapshot.largest \s+ snapshot.completed \s+ snapshot.min \s+ snapshot.max \s+ snapshot.keepAlive \s+ \n - (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/ + /^ id \s+ snapshot.type \s+ snapshot.active \s+ snapshot.size \s+ snapshot.queue \s+ snapshot.queueSize \s+ snapshot.rejected \s+ snapshot.largest \s+ snapshot.completed \s+ snapshot.min \s+ snapshot.max \s+ snapshot.keepAlive \n + (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/ - do: cat.thread_pool: @@ -155,8 +155,8 @@ - match: $body: | - /^ id \s+ suggest.type \s+ suggest.active \s+ suggest.size \s+ suggest.queue \s+ suggest.queueSize \s+ suggest.rejected \s+ suggest.largest \s+ suggest.completed \s+ suggest.min \s+ suggest.max \s+ suggest.keepAlive \s+ \n - (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/ + /^ id \s+ suggest.type \s+ suggest.active \s+ suggest.size \s+ suggest.queue \s+ suggest.queueSize \s+ suggest.rejected \s+ suggest.largest \s+ suggest.completed \s+ suggest.min \s+ suggest.max \s+ suggest.keepAlive \n + (\S+ \s+ (cached|fixed|scaling)? 
\s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/ - do: cat.thread_pool: @@ -165,5 +165,5 @@ - match: $body: | - /^ id \s+ warmer.type \s+ warmer.active \s+ warmer.size \s+ warmer.queue \s+ warmer.queueSize \s+ warmer.rejected \s+ warmer.largest \s+ warmer.completed \s+ warmer.min \s+ warmer.max \s+ warmer.keepAlive \s+ \n - (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \s+ \n)+ $/ + /^ id \s+ warmer.type \s+ warmer.active \s+ warmer.size \s+ warmer.queue \s+ warmer.queueSize \s+ warmer.rejected \s+ warmer.largest \s+ warmer.completed \s+ warmer.min \s+ warmer.max \s+ warmer.keepAlive \n + (\S+ \s+ (cached|fixed|scaling)? \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \S* \n)+ $/ From 9ea18fc5769b4b674ec736ed19b00b8a6d7ff773 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 8 Dec 2015 15:50:44 +0100 Subject: [PATCH 21/57] Adding checks for unexpected tokens in parser --- .../org/elasticsearch/search/highlight/HighlightBuilder.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java b/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java index c9479aa72fb..3233673f9d4 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java @@ -278,6 +278,8 @@ public class HighlightBuilder extends AbstractHighlighterBuilder Date: Tue, 8 Dec 2015 08:05:50 -0800 Subject: [PATCH 22/57] Removed unnecessary setting previously used to ignore sysprops in tribe nodes --- core/src/main/java/org/elasticsearch/tribe/TribeService.java | 1 - 1 file changed, 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java index 
343606e7805..db17d4d44f2 100644 --- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -135,7 +135,6 @@ public class TribeService extends AbstractLifecycleComponent { sb.put("name", settings.get("name") + "/" + entry.getKey()); sb.put("path.home", settings.get("path.home")); // pass through ES home dir sb.put(TRIBE_NAME, entry.getKey()); - sb.put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true); if (sb.get("http.enabled") == null) { sb.put("http.enabled", false); } From f2d8a3588850494a59f4da98cecea281e504e773 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Tue, 8 Dec 2015 18:05:36 +0100 Subject: [PATCH 23/57] Dynamically map floating-point numbers as floats instead of doubles. Close #13851 --- .../index/mapper/DocumentParser.java | 10 +++++-- .../elasticsearch/index/mapper/Mapper.java | 2 +- .../fieldstats/FieldStatsTests.java | 16 +++++----- .../index/mapper/DynamicMappingTests.java | 29 +++++++++++++++++++ docs/reference/migration/migrate_3_0.asciidoc | 7 +++++ 5 files changed, 53 insertions(+), 11 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index e5e3387950e..ce2cbd4e931 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -584,7 +584,10 @@ class DocumentParser implements Closeable { if (context.parser().estimatedNumberType()) { Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double"); if (builder == null) { - builder = MapperBuilders.doubleField(currentFieldName); + // no templates are defined, we use float by default instead of double + // since this is much more space-efficient and should be enough most of + // the time + builder = MapperBuilders.floatField(currentFieldName); } return 
builder; } else { @@ -597,7 +600,10 @@ class DocumentParser implements Closeable { } else if (numberType == XContentParser.NumberType.DOUBLE) { Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double"); if (builder == null) { - builder = MapperBuilders.doubleField(currentFieldName); + // no templates are defined, we use float by default instead of double + // since this is much more space-efficient and should be enough most of + // the time + builder = MapperBuilders.floatField(currentFieldName); } return builder; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java b/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java index c9877410c30..33a4dabd3be 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/Mapper.java @@ -148,7 +148,7 @@ public abstract class Mapper implements ToXContent, Iterable { }; } - class MultiFieldParserContext extends ParserContext { + static class MultiFieldParserContext extends ParserContext { MultiFieldParserContext(ParserContext in) { super(in.type(), in.analysisService, in.similarityLookupService(), in.mapperService(), in.typeParsers(), in.indexVersionCreated(), in.parseFieldMatcher()); } diff --git a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java index aec73f245cd..e25b95be578 100644 --- a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java +++ b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java @@ -67,7 +67,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase { } public void testString() { - createIndex("test", Settings.EMPTY, "field", "value", "type=string"); + createIndex("test", Settings.EMPTY, "test", "field", "type=string"); for (int value = 0; value <= 10; value++) { client().prepareIndex("test", "test").setSource("field", 
String.format(Locale.ENGLISH, "%03d", value)).get(); } @@ -85,7 +85,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase { public void testDouble() { String fieldName = "field"; - createIndex("test", Settings.EMPTY, fieldName, "value", "type=double"); + createIndex("test", Settings.EMPTY, "test", fieldName, "type=double"); for (double value = -1; value <= 9; value++) { client().prepareIndex("test", "test").setSource(fieldName, value).get(); } @@ -102,7 +102,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase { public void testFloat() { String fieldName = "field"; - createIndex("test", Settings.EMPTY, fieldName, "value", "type=float"); + createIndex("test", Settings.EMPTY, "test", fieldName, "type=float"); for (float value = -1; value <= 9; value++) { client().prepareIndex("test", "test").setSource(fieldName, value).get(); } @@ -112,14 +112,14 @@ public class FieldStatsTests extends ESSingleNodeTestCase { assertThat(result.getAllFieldStats().get(fieldName).getMaxDoc(), equalTo(11l)); assertThat(result.getAllFieldStats().get(fieldName).getDocCount(), equalTo(11l)); assertThat(result.getAllFieldStats().get(fieldName).getDensity(), equalTo(100)); - assertThat(result.getAllFieldStats().get(fieldName).getMinValue(), equalTo(-1.0)); - assertThat(result.getAllFieldStats().get(fieldName).getMaxValue(), equalTo(9.0)); + assertThat(result.getAllFieldStats().get(fieldName).getMinValue(), equalTo(-1f)); + assertThat(result.getAllFieldStats().get(fieldName).getMaxValue(), equalTo(9f)); assertThat(result.getAllFieldStats().get(fieldName).getMinValueAsString(), equalTo(Float.toString(-1))); assertThat(result.getAllFieldStats().get(fieldName).getMaxValueAsString(), equalTo(Float.toString(9))); } private void testNumberRange(String fieldName, String fieldType, long min, long max) { - createIndex("test", Settings.EMPTY, fieldName, "value", "type=" + fieldType); + createIndex("test", Settings.EMPTY, "test", fieldName, "type=" + fieldType); for (long value = min; 
value <= max; value++) { client().prepareIndex("test", "test").setSource(fieldName, value).get(); } @@ -180,11 +180,11 @@ public class FieldStatsTests extends ESSingleNodeTestCase { } public void testInvalidField() { - createIndex("test1", Settings.EMPTY, "field1", "value", "type=string"); + createIndex("test1", Settings.EMPTY, "test", "field1", "type=string"); client().prepareIndex("test1", "test").setSource("field1", "a").get(); client().prepareIndex("test1", "test").setSource("field1", "b").get(); - createIndex("test2", Settings.EMPTY, "field2", "value", "type=string"); + createIndex("test2", Settings.EMPTY, "test", "field2", "type=string"); client().prepareIndex("test2", "test").setSource("field2", "a").get(); client().prepareIndex("test2", "test").setSource("field2", "b").get(); client().admin().indices().prepareRefresh().get(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java index f01df630ea7..966ea01e95c 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -28,15 +29,21 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.core.DoubleFieldMapper; +import 
org.elasticsearch.index.mapper.core.FloatFieldMapper; import org.elasticsearch.index.mapper.core.IntegerFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; import static java.util.Collections.emptyMap; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.nullValue; public class DynamicMappingTests extends ESSingleNodeTestCase { @@ -407,4 +414,26 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { // expected } } + + public void testDefaultFloatingPointMappings() throws IOException { + DocumentMapper mapper = createIndex("test").mapperService().documentMapperWithAutoCreate("type").getDocumentMapper(); + doTestDefaultFloatingPointMappings(mapper, XContentFactory.jsonBuilder()); + doTestDefaultFloatingPointMappings(mapper, XContentFactory.yamlBuilder()); + doTestDefaultFloatingPointMappings(mapper, XContentFactory.smileBuilder()); + doTestDefaultFloatingPointMappings(mapper, XContentFactory.cborBuilder()); + } + + private void doTestDefaultFloatingPointMappings(DocumentMapper mapper, XContentBuilder builder) throws IOException { + BytesReference source = builder.startObject() + .field("foo", 3.2f) // float + .field("bar", 3.2d) // double + .field("baz", (double) 3.2f) // double that can be accurately represented as a float + .endObject().bytes(); + ParsedDocument parsedDocument = mapper.parse("index", "type", "id", source); + Mapping update = parsedDocument.dynamicMappingsUpdate(); + assertNotNull(update); + assertThat(update.root().getMapper("foo"), instanceOf(FloatFieldMapper.class)); + assertThat(update.root().getMapper("bar"), instanceOf(FloatFieldMapper.class)); + assertThat(update.root().getMapper("baz"), 
instanceOf(FloatFieldMapper.class)); + } } diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index b8683bc6fd0..112b50872fc 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -206,6 +206,13 @@ cluster settings please use the settings update API and set their superseded key The `transform` feature from mappings has been removed. It made issues very hard to debug. +==== Default number mappings + +When a floating-point number is encountered, it is now dynamically mapped as a +float by default instead of a double. The reasoning is that floats should be +more than enough for most cases but would decrease storage requirements +significantly. + [[breaking_30_plugins]] === Plugin changes From 02b1c32261922f266d714e0cf24f193b310b6967 Mon Sep 17 00:00:00 2001 From: Paulo Martins Date: Tue, 8 Dec 2015 17:44:52 +0000 Subject: [PATCH 24/57] Correct typo in class name of StatsAggregator #15264 (Closes) --- .../{StatsAggegator.java => StatsAggregator.java} | 14 +++++++------- .../aggregations/metrics/stats/StatsParser.java | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) rename core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/{StatsAggegator.java => StatsAggregator.java} (91%) diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggegator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggregator.java similarity index 91% rename from core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggegator.java rename to core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggregator.java index 5cc7ddb5dda..6e648cb50e2 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggegator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggregator.java @@ -43,7 
+43,7 @@ import java.util.Map; /** * */ -public class StatsAggegator extends NumericMetricsAggregator.MultiValue { +public class StatsAggregator extends NumericMetricsAggregator.MultiValue { final ValuesSource.Numeric valuesSource; final ValueFormatter formatter; @@ -54,10 +54,10 @@ public class StatsAggegator extends NumericMetricsAggregator.MultiValue { DoubleArray maxes; - public StatsAggegator(String name, ValuesSource.Numeric valuesSource, ValueFormatter formatter, - AggregationContext context, - Aggregator parent, List pipelineAggregators, - Map metaData) throws IOException { + public StatsAggregator(String name, ValuesSource.Numeric valuesSource, ValueFormatter formatter, + AggregationContext context, + Aggregator parent, List pipelineAggregators, + Map metaData) throws IOException { super(name, context, parent, pipelineAggregators, metaData); this.valuesSource = valuesSource; if (valuesSource != null) { @@ -164,14 +164,14 @@ public class StatsAggegator extends NumericMetricsAggregator.MultiValue { @Override protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, List pipelineAggregators, Map metaData) throws IOException { - return new StatsAggegator(name, null, config.formatter(), aggregationContext, parent, pipelineAggregators, metaData); + return new StatsAggregator(name, null, config.formatter(), aggregationContext, parent, pipelineAggregators, metaData); } @Override protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent, boolean collectsFromSingleBucket, List pipelineAggregators, Map metaData) throws IOException { - return new StatsAggegator(name, valuesSource, config.formatter(), aggregationContext, parent, pipelineAggregators, metaData); + return new StatsAggregator(name, valuesSource, config.formatter(), aggregationContext, parent, pipelineAggregators, metaData); } } diff --git 
a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsParser.java index 5ec9b2a59a7..86c85e40ce5 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsParser.java @@ -34,6 +34,6 @@ public class StatsParser extends NumericValuesSourceMetricsAggregatorParser config) { - return new StatsAggegator.Factory(aggregationName, config); + return new StatsAggregator.Factory(aggregationName, config); } } From b0febc1b9f67b0b8c938cd04e632cb18a95015bc Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Tue, 8 Dec 2015 13:05:15 -0500 Subject: [PATCH 25/57] make slow tests more obvious --- .../gradle/junit4/TestReportLogger.groovy | 34 ++++++++++++++++++ buildSrc/src/main/resources/beat.wav | Bin 0 -> 156718 bytes 2 files changed, 34 insertions(+) create mode 100644 buildSrc/src/main/resources/beat.wav diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy index b56a22ee2d9..0813713353f 100644 --- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy +++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestReportLogger.groovy @@ -15,8 +15,15 @@ import org.gradle.api.logging.LogLevel import org.gradle.api.logging.Logger import org.junit.runner.Description +import java.util.concurrent.atomic.AtomicBoolean import java.util.concurrent.atomic.AtomicInteger +import javax.sound.sampled.AudioSystem; +import javax.sound.sampled.Clip; +import javax.sound.sampled.Line; +import javax.sound.sampled.LineEvent; +import javax.sound.sampled.LineListener; + import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.* import static com.carrotsearch.gradle.junit4.TestLoggingConfiguration.OutputMode @@ 
-102,9 +109,36 @@ class TestReportLogger extends TestsSummaryEventListener implements AggregatedEv formatTime(e.getCurrentTime()) + ", stalled for " + formatDurationInSeconds(e.getNoEventDuration()) + " at: " + (e.getDescription() == null ? "" : formatDescription(e.getDescription()))) + try { + playBeat(); + } catch (Exception nosound) { /* handling exceptions with style */ } slowTestsFound = true } + void playBeat() throws Exception { + Clip clip = (Clip)AudioSystem.getLine(new Line.Info(Clip.class)); + final AtomicBoolean stop = new AtomicBoolean(); + clip.addLineListener(new LineListener() { + @Override + public void update(LineEvent event) { + if (event.getType() == LineEvent.Type.STOP) { + stop.set(true); + } + } + }); + InputStream stream = getClass().getResourceAsStream("/beat.wav"); + try { + clip.open(AudioSystem.getAudioInputStream(stream)); + clip.start(); + while (!stop.get()) { + Thread.sleep(20); + } + clip.close(); + } finally { + stream.close(); + } + } + @Subscribe void onQuit(AggregatedQuitEvent e) throws IOException { if (config.showNumFailuresAtEnd > 0 && !failedTests.isEmpty()) { diff --git a/buildSrc/src/main/resources/beat.wav b/buildSrc/src/main/resources/beat.wav new file mode 100644 index 0000000000000000000000000000000000000000..4083a4ce618139af5dc0a448d6a4d3569f2a180e GIT binary patch literal 156718 zcmeFZb#xR<*Du^M-QyZ}S7O8n5}c3#!Gl9^5AN;<4svima1MkJBoGghkl+CV0TSZw z$z;6gC+Ff0{cI~R#HPgT9>Ah!iTuhAjVhET%Y}%-$ zNt;|n004jh1dMzL0D=q%paBAkvv|R#1pp=W>-c4XUl#affnOH*Wr1H7_+^1#7Wid> zUl#affnOH*Wr1H7_+^1#7Wid>Ul#affnOH*Wr1H7_+^1#7Wid>Ul#affnOH*e{BKq z|5zbNz0duh>3`Rl{IBDGC-=Xq|DS#R&$9Xb^8fAeKXv?P`Tthe|4RPf`u_j5|E~M@ zxc}2W|9Neeg{buJHs;blWwU0I{O|hzEe8THmCR-0Ukc4u`geckRzGF`Q^SAiGdKIs zy8qthzw7!>IfDGB$N%ilJZ9Q|C5Rf=PhI9Qn$yoYncJAJ%{`m-{2ZhC`lnv&%+X zhqn*o;9s^jkKa53^L)&tc#69i zPU8lAFD}DR;Gkg>F2%>=U+}^B8+-^}g2&)hxFep8i@=Ds60=<2EVOe zfZx%F+vyXid=4J0--Hj;AI5$3n{j9TI;wstF48CA zOe*ihM&V7^V7wk1fVX4Ask}d4iUr_5uu!}W8%EWSz$>vZyae;c-(#*6x!~Ct6Tgnh 
z^{21~it6;ou?qbL?5Tbkc3-~~OVO{v?&+6fH}$izKlIbE-}IBQ{rU;mZv8-Ph29sN zrgy-`=q<64dKe4Scj~P5H z%*NRG9xMdEjm^M6V~24amPy&Z4v*LK4EyvW4e9!AhDQAZ0}pRBMB`S*_4owiUHo@r z1)gt23}Rz{gOh2UVS?$dVXvvd@X%yqtT9bAvWVlxfkdHkIiWFLBtlFj#0u&LaLW`* z)|r-)*2EQZ0#QO9rM^fnh!8+V%mX~h)4*J^7&uGfz)R8_6qDmXCtw>m6G#J(165!K zpa)w4Z^#2&2+ab|LkGZT&;zg$`UXO<9`uF%plR?7=peigN`;f55Ab`a3vP!%8VbA6 z0^s4avG79L5_lJFH+-IU0lrU5gI~}-!sWCMxQj-@7|n)8M}lcK$S9gOGLsg8B+*79 z2WV4~OSC1(J=!MZ8SNnQnRXUwq}@d1v{VE}G7&qZ5b2A2LB=5U$ULMAS%WB$eTWgc zgs{;}#0mX|^hFiONR*FGLqpIN=wx&cx)r^M-b6Fdcjy8GFhclTQ#rci9l=Fgnk)z?h;{@t;#Z~=di@DzWQ@EQNAu!H|jC=^JA zQ34Cm96_XLk6^m!hG3KEso=EeyWoMSOYl}?6f}vrLPF#sbg~E$4zw67oMACexYc5o z@S??XVUEQ%;SY=7g=&i%LcV3D(8uzvFxs+0ILWeIxX4m2+-Qjj_gNCc6PBRpq9r1_ zX-O9)TcV;=OGxz4(kOgvsS;*b_6RdAn}q3>mBM?LpM*Cp3xpRe9|=!d-V`3OJT2U7 zxlg#&a*Z&_a*lAJWvnp4GE5k2=`0**$q)uxDg>^U)dGQKp}=I3Ea_ub2s`Y(+`bhRv-$-LF5V}1R2U`pmoxZ)Bd3MqeanM;AZqB zd<>0(`=YH-74iqP7a0ioB3jgf+mwdavBs%>Oc)~ z8N5wIf)j``z+hSfq?lO16w@7&Fb*SAjSa+P<8H!ea3oR;1*S=cc_t$cm{Re-jZ^Ru zMnd0VxTimAn5OqPAo@?#oqILD1QX!4SdRXaZjOErb?5D?dw_LlCt=sMTHRFbbsbMT zTvx2=(H_v8(+<=O)@szb&Jzv$Ua#USaRV(9Emz0=t zn(~g)S~*!+sUQ@W6{(6DipdIFg+X2=zb(HcA1j|GSII2p*JR&iBV>Qb#L}^{b5go& zfb^BLMY2bFOfoYgHA z(oOadUH5w~c1`IC>@s%0>AcfDr*mSrssryj-jUqp*)guGpk32Bz5UNlW!tFEKiZ@n zfo)ehN?J#CtZr4db6an=r?telk8d%xDViU*oo`;y7S(Lg*3k66^>EX!)_|t4*78Pi z%dW0@?Y;LKa+I+d*s(C_vbrZY(R@0}twN0n%hBeKqvuX0F zQ#9&ozchZRO>ewbd!cb%?cT=GwMmU$weuPowNo3FHRBpvYhoL#YvLMz)QoQ|ubI(U zQ?snGy=GS*jX052{aP63;+}fi};@XlX&pNB-1$B#?Z`b8EchzxQ zLhF~e?5i(s`CcE`>eFzcbzcM6R@<bl>O+U3_8)cv3}x4UoKu%4%FUwekN&li7g*NA6zob2uD@Re-qd?R6W&6i&8 z!lgdl=Vf`_f%2H1*Yb*U`-EO^i&VDU}`2 z&X99-&GPfQ9sqwtl z#YAWinwIL?O<#1Qh%oFg;u3}t7&exis=q>}>Dx&V4+19Pn}I*^r$7g;2LcTd;6_6d z_|$M4G#M(uVMYdYz}O!uGOmD3#*0vt=@qoeB!(WFM6lQt1$z_o;CaLW_#BY}zb4+p za-tn}A!)S9q!;ZFIf9l=&ZV`JTWPkyIa)lBPCEpY(h31N%>-B=(O^GhFE|G&0{0^j zbQg(%J|o8=4e||gLG9o%=t6iq`T)LD zeVgktk4o2Ck5IR9p2yvudp5gyd4;a7wZ(6eSE}DKuPVPCUWos3uOR;`UX%Tk 
zz1I6b^g7|6?v+f@GwQm8;??_K_3H6I<)!l9=Y{#N^V0bzdMW+Kcy;-Qde!<{Q!-l5 z=l z`|gYVHo6b?8|ZH32fB-Wi`@!+Pr04;o$QwAYv~r~`@>c5bKdo>&lJ}`e1xvke7?9i z`W$y@@{V!202<5lal-Rq3gAg|F*CQs7wnP;x!X3sT_!Jb}@ zogOs~H$6@{O!XMzfO<&mbKI}lFLaNy=eld{a@}s(&2<}Rhq$S1(_F9EPIVn*i@Vg@ z+;rJvGt|Y!rqTI@^-kwRYfER7)dQ!iRzsX3tiCyxSk8A`U@37>S!{JUWC1u>TKr~z zTg0&s5uLKj7jo@J3V*l#BtUE@3ijBP@zvH-`Ae<8^UAHp^P;WZasRR$z$GjmaAsQA zaUP2fv1y`S)->TnR*E2lsp9jPgZV2Mhj@kb_gqUl%3Xqv;N&2?*$|S=8b|A9o`;>7 zwNO069okM`3f@BR0mVoUDWUn1JlbL+2)`3$@(l)4OVF6 zVV?{`bU*MUZKM8@rVD$amh0M8n3kMX znpjQZ#?VGZ!`ud}{%Ac|pHs)KYpb=cwWxKi8Bx=xdSi7&)$OY2%JRyQ6-edSAN?w( zl&|5B@ml-LVgw-*y#eyct_8f1Un5{I&i2U9Wz7_u(b>*8b(%w@F1^ zZ(bKJd*f8t{d&iXwXbUnbgzaK9C&pADbFAnC-coC52@}e$RQgAl+Wx>SU>jhT1dkUI!mKJ2?Oe^?3 zXI#OuoVbFqIq?ONIWr1;b5<3&P@%zh6}Lck#mViys#M_~YZUisPS>Do>PbsXG06fAz&LCu?q$ zUah_N^KoDQUwYeY+U0!9wsgLz#L!8#}$ z$zCp+%o%4fhZ|_Ql*hA5;)|^|3*KAr5#F--O|;AQoW*pzzbr%T)2uiS&#gNhifvvx zR@h#1YPH+qthArv0yy?{rNF_>*;(Qi?DE-tsOv+IiEhU|7r3wVTJJH&d!MJD&nYj4 z?{)7E-+Ml<{IY$o`xp6b>hsAzE}){1Q($9&EU-K9WsoxHY_L9fQHUudv@Z|}g_5CP zLk(d!!ZhJ4!+RsbBbp)&5oP^~B40)A@0Z?xNYu3fc+|mx>HSv>T0CG}wB^8{!LJ7L zh9nIV53!1VH#9%`uVIS@ZyU}UGGWByA#Nil4ONVk4J{aTbl8y?m*MfFbB1$bM~!$B zTRLKE+_aJ2W2#1e9W!;*=CL0~*^Cdjt+vAo@dLH+65*8ORIb_WF$&1HGCZ8WOY|4u3-wDVsu$5l*!lJ9?`3 z*v(V>j=eTDV(hc2;bY6E29E8V>O596l|gMT#AD!u;xSOdjWPJtHDkn6qsEj^HN@pk z&5JudH7Rc4RL8i0sc&LsQS%$z(pMmwos)bUACqZ}qZ z8ks!NWu))K6C+Y4XokB?m_GdM`0Qc8c=oX6@pFfM8uws`?>Noi{bL6Xt{Ss1+HXwZ zpe=Fuz@pgDfy~$y1L8+t?|(ApbJWLCrhdq%KK-IcPK(?)VqZkc@Vns^!%D(v!xW(* zLoGvR4C&W*@8Fps$K^zkkU6k2z-NG}&xroqK8aDj{yX}O@H-nh*Y|$J zcAui~3*J>>*~$OaF<>H(exzXgo(*;T^SLlePWuUxoTXa-fq~fnvS1PhUu>YLzu>(}OFT_QY3cQOF@|P6O zd7koO*)ws$wx{6*C!Xxfzmxav`Kw&-=WRLrpK)?pp7zTg{d9R&=951&-JVosoXfMx zVCKzEKa_j>F`TP>bTntwBT>$cw7;`W4A||J0S~OQ9^Eg>^u50>^WHtL z%#eFc83m~~GbW`j$`IcT%=rDTAwB5sm-MQXyXi+#4yO-HS(&c8laQWwCnkN*ok8hw zclxC}P^3&Al>Q}oWcs7z3F+sO6VrDmZ%j{0KAAp0`C)F*{i9UjMt{jwAT-_X1tl7O@Hfe_3Mw@YGj{I*1CL7shjrYRsGr0=7zGbaFgwK 
z-{ys7lUmZt_q4(lkK1Newzg+iIdodoOz+xKd#Ss=uBIoZ!L>KDaizq)IahkIg)Zl{ z%~YIdf2`znqU!Tq3pCC>FSHND&e%Z7etoI56JH`9ZzL4CrfVuUa)9O}P_MMkOfTu4WF+$|@Q}3=a^rZ>E^)sg9R3#iUV$T1CVb7BXR(6w(UQ*#v(DjP zvza9X>`2iP``eZu97bCQI(6Hec0O*WbO~^n;#T6A<-W>Upm}|}gOfuy_w5WLLwzEC3ttk+ioDtH_kI;o zjQ*?v2L?nB)DK)UC@K0{bj{#5gGUY(54kstJB&RdVED3;F(V2`%^qnvdiAKqvD;!Y z+J@I?TJRQGztbD?a4)UA+u$RGiKjk5;$kh(pz(oWum!vm+hQ8e0js%isb|5EnabIUeAiEc`H^rCN{2| zo*2LC_r%Oqxru_+wTX*Y>k>0obLOj8+s_Y9a-F{@$z}e@B%Ar^NzC~llT?YdNk0-> zlO83uB<)SCOo~l>lf+0&Nh+MTKWXi}$w{{J9Fm^StzSK5?uFGIb7EF+m?K}Mn|)^0 z#@T&VbhL8EQ}!%| zr(`X1o7}jt|0LSNF%#VuBut2!KYM&^;;i^d^QMfOGB;*y{G8A+gJ;{s`OMOdX3YE= z(>6V2)YEBOMjlNVHe%9L#&F9ipN5uAIx*zL#Mr?@CIHcr`0Rm~$1NH#cr34fb6jq} zy|J?*?MEBKACI~kHe_UY=*QtDA(MtJ2reI@3Y;`}f53-9>^_4Ap7nds-`dwY>XP@q zNE@$?@CzPe!-VdS`yO`{gdi?!g7!Fl48R;b`>eCy?bl^n?vrTK$GgJnfM>jAmHS&! zu-jnaA(tooDyJ}BfMYsmpM4O!%=RJE$EFWsht+-bi$x#AO_T<25`;j-yezOQ+llJY;RO6B<^Y6)P19=r*mrK!VX?TL)*u?xvj@*8=EKA zENQZ@mNax$Zm)k*!K^#^<3`Q0^8VE^WtEk_-?vrpzuEuLmlloNkwt-$L9AZKPcWM6eI6k-#fmQzm0nH_RWmfH(&31wfohbm-Ani z6~z>xMSTkg7rMOI{=%Z*X#pqSn9qJbDqryI%yWmQts5b z%%F$&GBY2n%N+C|I`hkYVdk>?y&25=pEI)VWoIn8cPqpB-o=c@)IT!rrk={!l6o#< zZ0d~+@6^W`jMU-`#og|Vmb=2t+PlLj+LGCPH$78&SDp!__RDfkJ)AWxwK{86YFPHw z)HB)NQZ?Cvdx<#-_rB!Zx;HXccdsaS^8Mj?Irl&1IX{^3{ zrX2B`oZQm4C-d^&t$K3%{kW%Ri~Bx1{K4V*u8)lT&7bu7>q{gBYd^QWSo@{1aD8cA z(blhZFL!@ycy;7^^XpS(U2iUzE8gDvVS4wdf?J$l>HOhSRp`gs>ex?`n)xMU?e@>Y z`U_vY8nQ~G8oz%X+ob%K*lba@p(UdHNbAfW*V=YhJZisP`MTp%RaK|DTGHiGOY0d| z=OEruAJ%)fVT`1_alRC6-Yg4jIVPXkdR=k2?XfbWy+~Ex@lDO>YSM&v%eC`+NZkoB zSD)8wkGD#^4Ln(haiF}PX}KbrIIkQ^7O922ijk-m(CIHW7G@xG9QX| zv9?=mWsk62&tY4w+rM7X-L+pB;t?W;_blLZDedh4q^>@bwZZn)PH!tT??lNaLk6f2bj~%W< zJqNgzcxv3Hcs+El^qS`}-J9!K?w#v7&S#ESk&nqM*!QaURo`$Q(67X2iQfX>w|;70 zcmG3vyZnXz-~F%od-w6^v!&17KCk=u258R!+!9Ds#i3-}N|Ip9(_8n7rluTNn3 z(mu*Ci$2+5FZ?%#E%Of!QHl=P*C-tx`uJJNSy-!>ms-vd66L!5jj zhvayxg2#BD39k1F3QqERA7u2L7j)E98ED~oFz}j(AkfX@O28d=mjG|~TYXa8-1~UA z-Sofd>fmqZdcp6E3)heCa>#eLv))JHw8m$NQ?qx4<7Dp`$5&q24k2D{4%a+S*dv}= 
zyCjeKc4h7#ZKK@7Y_Gdrw;^3=HVa%=S-*7oYUS(_X0_eja+1w`D6;2qfl6@GqWLH5GS^m&T<}UCf;|qW? z+<+1E&E#J61Cfuo5_;NJQzY%1aUHQ{McZpg(*zk9nsxYMb(a2>N{6|r24VY^2XrlpkJ?cR3++AmJPludU%gePQ#DIR zsp6&QmCq&ZiawHlia&dg$%Vc3vZLa@GD!TJbWcyC#Lyij+1`D&SJx#MZ|NE@)^tAZ z+1zQ-qwCn#y`!VK3+RaNI@whJwq*3gy> zt>2pEE$f>%v{*E2oAa7>HP3BAo6)9oO__}zO$!=x8bys`8s9cFHSB2E*3h>>(9lzV zzdoftrhajKTb)<^k-E;hK6MZ4%4;{)?X8Wj3#?_;HPqDBo~wCOJGSOsV=97giZt7hk!(Ze``ey5B3C>(VN% z>%UjV*W;B(>OHE8>L*m0>UUKQY)Fy+yQ?LzuA*gkU0Cb4y0fj`^{Upr^$Be?^-tQO z8XViNH0*CTG<3DkYn(Sps7#i#ipyBP&2D*ee=$)=4MIPe@pk8#^9L6dgg*^_`ETs?H$U)~<9J(G?~?(w!?8 z_6$_~+4D-#M?6OPTwJbXoTBOZKYSQi0}g>0g>CS*W&D_FB7KK2--Q+I80z zTd*i4Ltn1EO1*ap#5w9hJXJl}FjCWIXwqyk?$I($PP*%+d|fax7kfhx*bMS7eJ?p2 z-wU+kBJddXP9?w?4t+Jgg*KX|!PZ0v{FK;2n?)iBPM$}u0PbiM_!#{G4x(>>is?Lf z3gaGJ&lpKt%50$ZGPfe@S$tH(x`S?E_or*wpXr-9i3|lt&RE6W$86;aSTlJyS*5%{ z_6Ytnb}m1L<0tsexh9y)1Ou%X(pv z|zLts5*kHY2TO*xa|eZNsw`+ibAzYg=x;**4rJ z&-RRs&Q@X*X%}O=$?k@2x}DlqY!_$eX@A{rwtcVN8T$eD@9YoTWAUJN1elTM|;O}j)NROJI--LoOU@5aJuT4@w7;z-55b2bVCX?=JpMl`f7>H7;DIS{H+3l}o2%nafwlk1kIfi(LM6%yQZ1 znCvp&@q)`>$3rgmjvHLm4)a_}9b#Ql9s0T)aIkZkKK35YIy3n+f`Xkw>xafwhOf^ zvTd-~Wc#~?k8Pwytxb#Qgw07&v`v(#*Sbx3#rlkJq;<4VX4NCOY;{pE)M~h(!&1sW zZh4s>Y&neo-J+Yf(c(1E!Xko~A*$g{5bfrQg|6J+gfBVn!ugy6fr&jsaFwm%_hX;r zm$AJ0>sW<6K5GW=5mUj9XP)3n7(U#Sj8~jM#$3)9x}Lp`evWNH4`An^Z&mRs@Ce$ib zraD)(OubX(tRAYWP|;O?sJ<#=Ro9fTYPs@-GE%uqi6{pt%c=MC$%>bXO^U;c7)7kY zPQg-0)@6vIy8&Ypsk`$Ch zOY5Zq>2qnjc*+mbUilB(HkgB)5CNOAhrGNtX0J zlEn62lLYl1lUVj{l^Dg#ByHlElF#CCk|*L}l4NnD|cp|EfNy$KBM+wp6EHU)BO7I>}iu@=FmgsvTBv?% zh1XhU9v?kmt2tZq}lQS z=~ww!sYJe7%2b??dMVPRBNShyixev9A%%_Xu40hvgJPkqS8+@xP^QZwl-06XN=Uv> z86ZzpQks+pQLoL^%7;i`jWC& z{YmMl0aW8PVXA$aWvVRAbybJvyUI?>QpanDs}E>*sq?hCYKc~%_S6MwX6sgI&gxP% z#kwvHsq@iBV#~B^u~hAStXtcG_0f6g*Xri$vvfD~I$a&*>t~PeqI}=-u}e6P{lFvj z7KTmw35G2F?*^6rwILEW8Mfn*#(aFek;ESwhZ{PLzZpDCpD7O(OXF439Al*^*=SAl z8mALsrmMtuQv*?CawAbo5I>d!zp;7QrXcy(jmIWhl4?F^Pr|pAh(F)+RG#oCb^`jBA z&9pEio3;#5)2<;A$X8?wf}&55XcR*>qy5lF=w`~lEDLp`%h6f%K>9iQ3i=0n3Z104 
z(8C!{jOC1JjBAWPC~va&43c4D1T({!^O?(;e=z@KzF?Ly#Y`s4mNkSminWEco|VSB z!RlarWVy0+tl4Z&_C@w&b}4%wo6dR69>{5AuiA|7j&tYm(zu6tpSfwgUhWSbn`h+t@!TnYvKam%-U|L+-f{jF zUK&4xSIRHuDftaNOM#jfAzhL5VO-&?ForPzpy0P|&MZXEoh>i)aicSb_iB1aciT)5g7M&JkQ}l$oeopc8 zDVcoH0qS}u#oH*z5v>$tiWX3E36yVJoZyaVpx~w`KyXpyEI2OW3HFIh{0$;8f3c{F zpCEe8A0ZmOV``@0~YyGn4K8!y?wU7S12ahxqo2TmNbhwaFG#+EaV zvEMLeurD#Z*((?-)&NE!i^Vw3s-`Ef?$TW;U$idfNcv-@h`yEChz@2xKvCvqw3IOv zz06>v3mH{N0OKy@iLw=WMIVivqFW;~=xsDNdKRqumH>rE|`2kSLr znQ1!F$7DswsBeC@@tSF;alUDk(c2_4ijB>N3}dEYyYZl5m~pCsW%Qzav$gm&LovR{ za2@wItiu(QhxJq30Y8At_2cljdTac;zCpiHe^)&35R0WDPs=JDLsFHqi>XDF}9QOQ5amr7jUq((H&wnv@gK@8mDw^UWxmW;`BoOLERhXW zmdWD(g<*}dWy)6BMrF5bk5VT4O{tNcq58V6MC5mse0df%h8Id7d9gB5UZ#wZH&9sD zqg)}!lsn~g)hW5HDp~HQ%9jsRRmjJy6!JwXj$(((OL0~;MDb8HQ}JH4Nztl0qX5;9 z6fWv7ilJ($VxgL+JfaR#-dD#fKdV^KepB_);;M;SH}z(1too{Ut-45iQQf9}p=Rp3)PXt+%_Q9r%~stS%}w2P z%?I5ljY@}WoH1W*9G0Nni0#u}$L?w0QhwwT4A)xeJ#~Zh6LpIyFU;Tdk93*(MqRaD zh(Y*p%Kv^NHVse4_EUcN4{=0aLE$HfFW37TuIMKjKIwNFO!~Wq5d4Q>F^(E9Q#kqs zUur}R7md+|x5kYIl`)mV(mKOb6VG_qG{l%|T4(Gq{cW@&%8bJanrRIYX}U~(<6aS$ zO})eilMQJw4I=}IRpdhA0(prjBtKL53Xv8-Bsm0FNv;O|BrgNs$hTB7u@czcti|jw z)_V3()-Luq))DqJ)*1FZ)}QRftW@?&Ru+3L>jirw>jQf$tDL=q)x_S#>S6C?Y1ju? 
z0Ot^k%{fAO{vBnxa*nfnIj2|woHMLo&UscS=Q1mTbA=T}<%2jES%WFw5YAaDKTXj| zRy5}r#XrCr$k|EBY+{9RlBjk|SpJ+jEFaDkmM15U<-!@tvf)Irc$~g0nBz}{k+`xH zY%5k5o6c%vW6W}P53`tE$9&HI!c1cqGXG+yGtaXBW*%gpV{Tv{W-efFV@_nRU=Cr= zpynIHbYO=vnQUjKmc?Q=v$Tv7Rx=}q^@;H(E0=MQ^%r9W>lkAqYds^1HJ#zk8pPnS z+!zKX%IIK9=wF$Y^ry^M^uL*p>BpGY=^L28(PuL^&_^(5)BPwM<1qv13Wg=UjDb@g zla1&V#%pvpBL$t$_#GY2NJ2drlTerufwt0Z(KmD)Nuf6&ztLYI%jl`dX!>cyi@pt^ z(G!tYbQJO$^+S?TE^-)^)0UxSv@vKdEdaeraXSOKb_Mz9Y00=|b{fZ5cyK14vYiu0K>uSKtJ#t5I{wBxPgZOYj7990XG34xC+n$i+~I6l2fhJQfOkMV z@EjNgWB`MKR3Hkt0fYe;fDqsm5DXjz0)WGSA8-Kh2KEB(z;3`9*akQN8vt8iHDE<) z5&<&+9xxVQ1JP7`S`ZbJaB|nkR$d}|}@;P~n%q1_7 zndEUYjXX@=B@a-X1LQSwKY5PaOP(ZmQ{3&;c{90@Tu-hbSCfh4QZj*@M~)?@lS9ez zWIr;73?ZXQKQf&3B>Rw#)LBG2kqBu^>Ie}jCYWRsVIrysIq{unB}#}&;vMmscuBk^ zo)a&KT;d6lL8S-8L*h1(N?fJRr->WH5#kcDi@MrC93_?!`-r*3Hexcdju=fWB?b|* zi4bA};ZBSsL_~iAAOZ=g$&IKrSrG3{G$PBSGTk(_m`<3!nzow?Oe;+HOtVZEOyf=a zO(RTeO#@7GO?|17k7=mM(G+eHn*2mw%U+yNHE+tL>n#`JPmuOn6>2wIX=lyh4(kS#N7;O zIMZ+ym*c-tv1yy|S9l`+5Fd+Q#{1!ia1VSf&ckO@@oKSnyFL>Cs`td7>qYn-J&6CI zSLnCtTl5R{-}U44FZKQOkMth;D|&(cpx%hBq2kbH>&vMaub0>W{bQ_;{wC(6KaKJA z2Qg5;6;o4tuP$sM)__gN%CWIn2{ssegN0xPm>ZUZSzu`xf+b^G-Je*8?jlyLJA;+z zj#IH=hp}h6U09}WBX&=>3cI0Of}PeSVh40HvCX>4*h<|PY@u!_6*JZko2CoFChPpM z@j7oTPUnS<(s^P-DH=pkKb<=jx8;U~=$x?tofGD#bHu!K4wxI2J5yx`if>ERTT$(V zln%Zwkc#yR!7OxrF&kYd=BSIHWTP=}T`cv?I}HodEyAL8>!^NqVN-R#VTro)*b3b( zY=bTx+odbSj_OLW3%W+^Z=DiL)1mrjI!FC`>KC3$-6$&dZH8X2Tcu}Fv2l*rX?-Ae zOFtCL)=$CSQ?Ycl`dyepe+FaVcd59$7g!Xv7nq1ku;n;SzmJORyNZYFbEtT-ulO>( zn2H-j4L9^|hUfZdL$!XU0n=|W*x?rp1MqCa9BQAi8^@@fvZL`CKFnB&FEOg{-;8`i zs?pEz-Z;Y0Zk%m^OdAa@rsIZzrewo3Q=wrkwSPEjG8k@|9E=5~fmHn293y7hW3(au zHufXlQP1VQ#%+YqbcKj8gg1~!i~uUA*fce<0}zoHfe;c8V5!AjU23ia!KpWx3(0+J5^as2fx(XkI zlHm(b8hi`NfgeH7;pfmx>NkUTa2Zqr*F)do4yYQILXEHvYKK9%8|K0i*bbJ#Ua$fV zhShKsjKf2z5aZD>oi-8X(q_Rz+G5y}wi>pfZG`P;TVY4qHrScUU1*!Bd?V~YTTj&| z!ED-cs@+0Z1J9=PPJtWXv2YnY68-=Wh6~~Ta1I;+--r9cf58FpMc5lY4!gqpslKILCg1jLFvWD~^1L_5hU^6HOE5H`81S|)O!1rJ_ 
z_zX+|)4)sM-{3Lu9Jmua2quB+!Fg1g0FDEr!Tw+X=nXo6mLLlRK?5KKBtR|D41557 z08fCA6au{ft^!#U`rM^Z=NhmT_=7^41Hb}cBai?r0me}0!4z7B0ug{0m8_^lGw(ww ztf{B4<^zQ_xzss@!kLTYBl0MjOkv4&3PUcC^T|`>H1Y^Jf!srmA-7N%vX&f1t|SML zOUO_PWBe&}@g(C(7mDjZVT?5yO!7z{3RT=lBVkLb2tL_Mpky0C5OoAbloM(SS>zO^ zh$&?0pm3#y$R`?!r$jx4Ff~LrQ9)5Dkwbi-=p99GD9m|JVa_MwF7b`HL!nX%C3~M} zC(?;tB9GEnNEnHa1Wf+;7lH{XB(o#cge%p*7l~4M$EVQFmKv8E8A**SnhYREQDcuM z`;${BWSl|9QuCNdE~C(L9XX5KLCyI9RXR@1^gLyUo8&zhCj6cHw01&+U?UZIY5}i-{oy0< zF!%;M6V8R#!KLtTa2HjAXxXq66=~X!)&x(cY2np09_=@p7wtCnG*L*KMysW*p=oJ{ zX?)~Qnh%mo8;pFRB_JKNB*bLi>mk<=UnB#GM&2Qlky>OKB13i}FnR*9MXw`)=tE>U znvYCFOOVxQJ#qk*B3DoXNk;`#e5pHHi$+k-9ivf@J`=T|FGoG-+t4uj5p)Fo0y>3$ z2VFwXLATT2peN~N=->2aG@CA?_Nhkn8=XO~qgznVCeCy@-IuPThtLTsK9$ZGLKjj| zL5_^kbYDgsJ(Mw)K8P`%K8i7kK9-R{pU9X&PhiZZ&t%M{o@Em0a~TULT11~s(R9Xq z`eeo&`Z&fkI^{1*AHoO;R7{~t8WkaZ8r%kL0Ovqcsr`Bc=nGkc0!U9GQa#WNzNQfB zK9ElB!!J_%@7>@MU=cVThyx>mK+qWwfOP8VSw@xt)zt4{MdWoLg**TpCzk{3$ao-u z`W_CX{-eN^lu`GeuVfdIL4G36k!i$6@(eMB+(Lwsvk3t?l#mgg)Ln>4+#@8W!^Bt9 zLL$>Ngg9q%BG#L5(?nC1DcF=};+Za*WX5f#QsW#`x^cMaw9(hJ!N@aBH>!+-sokcJ z@wriGylFHU4jOw5ON^C<7~@-mk1^fAFkUru8}=JM8J7Nk99;vL<60Yz(A?=X{OLL{x(jcjd1YUM2R#HS=%mn=Ht$1I&F76W#igU%) zVt;WG;BdXf7_pwH3(;Z*j0ax`xx#7Tldw^^BTR>J-ym3PCCm{j38RD%p{JnmErcu> zcfE&k+jZUs<33ba$$R*J`7FK{|DA8lKj+KxSNR}7@U*~2J~yy{{}f2%Zw3bQ2LoOC z6@g~p>8}pX){=a~KnPzlK=E9F4+#F8K#u=s;Jg2I;JN=n;JW{M;H3Y2V2}SqV7-5T zU@^E~r~20gM);Qndixgy+WBV%8u=#$YWl|nD)@&5O85r^!u@>%7Qh2(Xu7XgK=Abp zczitqE@)ZMQlR~S{U3dO0lk9^MqZ+#yF zzkGiJ*}h!hYWxA!NAgC02p{4v#V7b{@zwmT`NsY}d`JISet>@-Kig?|HYgk^yY$VHHZ zivgSXJWyUt3N#npfdQhCpC*>%H;B#nlj2bRF<@Fr;z6Dl@AJV@5?@0S`5sc3FjcB6 zY?Jy6SEWV5R~XlNrB6bLEDQDIGU5=qySPeTAYPVFiOKRe(V!qwZKa|#T$Bu`ey$%oXP@_Y4}tf*eOqL!fa(|Rf^wB^br?Yi1q{RtcrhBVMBBNMfL$PsNR@>RQlQ2HOFE{r|L>NU~5`cU+(z7AFN zTWA#|2gaR2*bbx__7?dU!_nPXBN$swMT@XgXcV4=w!_WX47@pZ6rYBD!cSlt{u8T2 z81epq8muIy;#Y{1cruZUQ>2BcPBte7k&}s4VqNGOFBonC7WLs(vTy5S$ zk0_J(s0vgj)sHgL%c!dK8Mvx^q-N0`YA;+p?$Wg(#h@Rg7|f?R!x6fq;U(R`P(XJ# 
z1OxZf(6HDr95M{n8m<_w8r~U_4cP|Ts2OS)!;J%hiCSjtZait6WPEL0Z_G2E1}^HU zu{`s~*p?|Wj$jC8ArryuVyZ9~nP$ujraP0u3}rmb1cqd1Frn-MrVP8Bsl%>eTCwYy z?(9ZpFuR!<#cpN(Ww$fa*&WP0ST2KoYuR>Oy*pe4fgNN59Ly*smu zZ40dtvzx5}tsJb!F+15%xSwOTvN$|mFm7b?jceI7<4X36aS{8}IE%eu{Fglic+_5F z4|c7w8Q@Zt*^z+5bp|}HmeIjP8b30c;TGUgN0_I;ksLS7V3r#OGb0VHm^Oy;Oj!fR z7z}P>9{t_;n!aT`MehatX})n3J=EBOZefh2OBp4KG=8Tt4OglAhAq@i!z605p(EAN zP>L#RkV)L|nM?(&=_b9H+)Vc;$I;cumNZSq(rJW1-6XzHn~Ag3cw#x#f*4Fi5Vfgd zoTXmiDdYkC961A@OLoTFktOj^5@rv?YwQ}a7h6h<$GQ{su@ZyvgI{k&v8Mv7p(tLTYG(;XMwUAp&W#p<7D@Ov_KubR%{pOLBBc7Iih+CzX;yme& zI6^uPyv-3n8g_^!r8S~WS^!AHRIykbE2fD<#gAfN@qyS?yePI64~tF2Er2>K6RV3e z#7g2gvAj4$EDcwd62SJviY?&^(-4-m#CWkPAQP3uGGaxstXKj5%8ONCtCm+BG2&lws#qv4 zggl;gqE*@@CIBu{OS&qyl^%)xq>thRDMef;<->och^HizbYF^xt7RQ2SL!S&(kLky zKAB4LCaIl#TpA_cl9tNvqyuney#-l9Uu2V9AeWSlkO5O#=^-~$66Jxwjn7by$(xi1 z@>%6KeENPlQ?V+DT2YBn+bRvz5lUZmkupQwr)*PiDVNnB%170w6aeOA(yFMHw2o>g z!21)m73zBJgn9<>t5=#!&Cx6xsn>*=MqjOsK35x~@6*=kcVW(+1ao##bHL1l1P&kp z>7_SArs*&z&=(GHJGD+MNeWy=p)RC{l-ebsJ}i&;@z6Z&m-a; zEMLI0KLJMLA-vLE;u1XTMR@je@GAciJK%N$+%6|J67z`F#AH|-MJy%;5Oay{@Q&LP zV~G~T5TXImi>O9)BFYdg;Xi0d*oc}01^Xd`;L~AiG`sSEh zV0qXcECpMOeF5g?C7=TLu|e1stP9-J9NUZ4#@55!Z9X6a6R=>cA1Er?U;6E%g?9b(kr~!i=7U2r@y-)3RXp{z-eJ-Pg{;{Clsq z8xY@>+6-;FHXQ!C0P5R7YoV3Vs%t@7yoPFKzHLTT82dO0h=e4O#R8*~^7AtXTiegn?DVTai@hbU-Jkg_3r zELGwF=T*fo&@a7{ev8k6CBH9S6t7BqppRN6?vo~p8>E5ae5t*dDAkADu!_(l$B2z2 zn^+k*$T&$9f+Ux~Kt>o28DXmUMDU9@g#z)skS!j8%&YCfZ*c`=fz1>?L;lxWaggv7 zvcK+#ZG}sq2RbQMflROx!fwd^+APw-8d2hxiUs^!P`ynNzw%=M7ac6#<$H>k_>SU# zd@Jz)-$dNW*Aut$HN;JPC2<2^PF%;AfxNA<;tIZkxRkFVE&?odKHNW-uPM&rYl+kO z+Tvt*Od{Vv90QLBOjsPuHy8WEvNzvU>;bJa?C$`}Hn42PHx^s+4PjXiw(E-RpmpHu zi(TNpo_tGqTzhdC-%T9L!|a704zH02uQ@|p$uAN&@@qh?w-w&!e(?l<0yx|Y;w}EB z_?UkrzT@ACKY-6o=N)1p9}qR3kSu~-Dj}4RYD2bK2ceZTT<9w;5GF`_g~iehVW;#( zxG3cdZvfxThR+?5>xfZuSFyG{PV6Et6UPF+yA<%w~Q6|bJb(dUIeJb}?-ST2JPC2P|hHJxoz;e$hrL;dv zH;qx}YmL;?+9dS{C^2yThgwZHX@m8a+FE^qc2mEl}5 z=r8mMI>hjY?q@I?_Ct18nqjt~gz>y#xG}?U6vnw3#!kirW|46ubJ_SGlVNl+K};pK 
zH8T-7*YoTp#>M6`lE`r>jv{n z>nU@w^@%y$mSk=MYUW`!#!WOF8={OKtmZ zOH=z{OB?$MOK1BjOAly$?f+Q@+K*a>+V@*V*tc6o+t*vh+LuC`3+q#1-#E)K`%udO zdoN2*ds|C8dwoj-dj(4cdzdA{j#-R$rlE3%&EURjrON3H$2dDeGPWA!>JAhk+I7~iI;WtFMw2bol7$h(y&xuWt&dLWO69+j2S zq)Xx?sh5aAa^Dtlw~!##6Rrxs_}0Qq{x=`SPvmb02uKp#7r+8F0;m0N{N4S-{gO}i z9rtbbb@f&DDc+afv)lI*?#k{Z?jObV+)Ikx#g&WC z6#pq2RlKsOWO0q6%%Zfy6GiI_#}+j#tXSkKC@S1raJ{fY!Tdrgzg6Mcd~@O8{GSCu z`G*VMy2cf(bk!_q<*T?+V&cpeeoip+WI6LK+c9zQb<{|k{^HN;f^X|FEJV-f=F;y$NoV>&}03uQ|u$-f)h9eZz7eJO6>>M&|x>j?c|;PRkXX3vxNvs@zhp zZMltIhjaV7F62&eJeU1b94;-)Z zvmL&Cb6$yphI!o!#^o(7*qwK);7wjpftFXku#R(7;biBL!t>75LYFhTsG@67QKIW$ z(Ro)!k=s?SxK94$;>G#5i{IrF?#P0!?g<6^-S-MyZcAZf&&a|po;!s(p5UTJ-YG@f zyq}7k-WtWNeA|l;`}ktbH_$!Q|H%EsU)EDPu-UUFpm+)bW4%53AKu%1D_;rWvTuzL z=XVPm{QX7E|5%(7s37GAwn;;IP5Q-;mwO4n05GD3f z)__dJ&yy0KK&>S9P*yUC-brqvqX4lzOf3gwS2CQT6OGjj$;KOo4xm=K1<8bE*t#&Y zzRD1$D(n=~SvJ{J-qeyiWje`~0+r}NZlXEF{KdS^T-SnF_E=_FBujy%pLK-wy7hy9XDe?nYu{~OVyA z(B4tCLb)hs$n(fUA&Vlrht!WmL%b1}gU?3%6FfA+9&C$v8uU1PdeD^cQbBRyU+r(g zme{9;Rk4SLCE0F;F1Pg$t!R@&K3ETiOtv-+u~}1tuUh5>_qK!t^X3adyUcCCLy=?AVt!~8xNgP;T)F|_rWlr) z5Cd)6NUvj~Xe)b)+RoIb!k8!I9%DD~O{5X~42eXHLBJ2wEAUdZ4L?mC!YWhMuuJ3< zvV7FzDJ}I@E{GrGx?)@TiSS733aPXy{B;=3)exn?1z}B~f)E+_4}1_M_$K~? 
zfzQ5}z;NGwzt}UntQEhhMV=6+#B7;i-X*ai+30QEQ&9l zR&=_^R#c_ZiUSY-36}-wik3Os8jGK|7ZT#{8{;eD?ER->$WSxHOO_=iMg6O z&p5y3^>!xa;Z7>=YTgmY=)A^`$h<$fA00Dumpg)T8#*rK_;NevoX>S;kIr3_T_U$) zc5==K$Q+!W)h8z=D>&zM=9lbQnVYlAW_HQ`ong&hm+?KTX~v-}fBJ;1i|GxsCZuy& z<|m} zYRf2{%BOo%Qq!NOJWt=3axQ&l%J%eLDGSqUrTm*7oH8ukm)s})M{<|+JIQU+4<$EA zUy)oVePVLe^uEdE(pw~#Os|q0mmZZIn{G^wPZyIC(p^cV)Bh%wPydxvIsJ1|&Gh$4 zjnm&Ib%Hh|{ZrCp*uOMAJ865mH|bP5mi#c?lAM$tn=GbROODEDmE0g>K=Oc$Dams) z)+X=EIF|e%<6&}UMsl(-Q%SC#8J#j9vvJDG%pob)Gnb^~WS&R~&3d2GCW}v*m6ecs zI;(4Ha@K-Wd-nO%4%um`OS6O1?q+vN3uLcItDf^PZDNj?b}py(--4W}f2-zR{yR0- z^Y>|@>S7xBK{SH=!iVSwvIW+ZdVyV_+T$_wdwd<; zoe=5o#8^W=^0y(0>|h*B-7)^9$}uD8?MybUGh+@Hiipg#6hL67XILFI#L1aAvk9;^j@2_6?57xFbYF{Dw*osgp;Oeh&TFm!b2+0YlE zQfR5L)?rJ-Hio5zeGO|69vZ$aykEF8d{21uh+pBmBSIpIBf3Sji(C_NF!D)+CsK)M z8C558Yt+QZw5Su2m7A4^D!6VU&Q>0Pl>@x_+l!R zU}Jlhh>M+9qGs&T5-nq2mFN-cD)CP&n=mo9e8Sw=)(OjEhb63wotv;Zc1yzc*b@o6 zVs9kuiG7~1H}+e??$`|2SCp_mR)XWOlCxv!l8Lcw$>Fi)l6_)rCELdamuwOnTC#d< zNXdj)dr4cYsiYc1lyt;M37=yM6K=+&Cme|RlCU)9S;DB8YYA;*jwFdq~=yD0OqM3v)(asX3qFEQ2?gMybw znu2Ra-U-T#7#p-DA~>i?#3OrNc%pq%y9CKIW`-57Wllh)K1i8E06Q8d*ze<5BYqLw)mT!)J~#jN=Z|2-k$(VM?XSnif;H z*aWHr`;1IwMv&7Olr%E?iG#+vL@nbt{H0+U-p^pi^XRLPao8JU=>U3=+J#o9>Yy*k zA4q?4CX!EtAq$AxI!6rB58#Mi1wXDmz}jnFv3xZR-K0)NtEo8pN!f`^R>~tb<$-=x z?xFXQb2Uj?pdFIzS}W<4>Jl5N+r>{x6LF;C5F~k5qOZaK;W8Xc`Fy9mp z>MQFx>P>OC_U>@!dAhhadJOJbo@d3s-HVDBxf>Rjb_a^y7GEx!Sv(PDB9)516%`b& zF1k|KxM)tHr?6GwNpSoPDNHR0DZEhdzF=X&3b<-EEJ!HuxAoxbGB=-vy-d2v!si1O3rtAU!B|Y&N&C=t#p>k8}2M{GFgE`I=h}DH>;}S zWmXx->8u#X`mA8b^em%eNEYJgkR|5U&+_M%%kt)iXZdoCSwb$r>$ydlm?JxrcKppW zInpwNphdxYc}GTO14m|N7e`j+2)KWiBR6x6!Wwk`LlEC=I_ZFm47#9 zYkpeJ{d_VvC%j4gcPI9Mn;eiT;E zvlflb>sWLoZ*|e1yyrzhPNcZAvr+MC=j`IA&a1_`v#_|atD<|pYpna8>p!;!*Su!= zWjxFB$9SISpZ3rNE>DMoD&7qRQ@mdau6iR2ecn-pb$pi#=lJx(`@YUaxPNa^Cx33y z4u7rUH2>n_a)EcnlLKMyn}HE-HE`M8ir3w1_%5E0`~gp>;Pea?>UvKJ%e`*lv$viY z?^`HN@;wzF`HYgq-&Gpy-yxmzCrOgOgxn!8O5PVZE$0Uc<;HwXxK7ShGWaJ-9f48T 
z3EkDd!fv&en5M24%W28t6s@XsS6e0#`X8yYUR~a$uaW=i8FB@rzA_Ejq})X^6auZK zc1BmKJJ4ThDq2Y^jV;y2W52ZvSascltIea(cJ3~XCdYj(D2F?-5%m;GT1uqaodu`u&EYalN4aS39oL)l za~rrA^HZ*sS>(or+c}Yno-eRkCcg23hY~D_AqFO|7i0r?t9mthJ|Yk#(AFCwKzSSuffigCFp-)nQAu zV)i`n0(z{~AcwxSowW70n{DImVYYepSlfDgDcgQ~S=$ABIom^MA7K4Aw7>Q;wj6tD zTY){^#@i!pcu=s-9AvSD2bpZ~K}K7-Aj(!b2(eWU60Eg?ilOCNYXqfQs|0ANZq^+B|rLDX<-iDbK zYYO+%dXu|i-NbFMj^xH!>vFBEEEi+_Yw}rcnBG{{nT}e9ndVt)nR;0arizwi7O`An zznPb@|C#%+v(4q$_GXcZFu!J;+#co*H=0=o9>77IZmi6`H>#$C#&@R4#=WK%#_=Y* zv7ss5z?m)?QrU%ub8HvGTsF?onk}L&>;w82vz|W044_9d<>^WcPkD^bsN2TP)M8^l zs;x1eG8rA@XTue8hhYIZ&;ZP{!A`p9B;p!4rP1WFUcGwL&B)bGc8P^0k} zsw$pG3fNWhIX0i%jJZcbdr7mZK;n%!$DzDplRR(d<$yeJxHQ925F(yMj}9! zQLKK@U#ffcgX$Q4rdm_)q9S@3^}VJk`?ViRqIO6HAhv|R2g4V7zw`aM#z%ZTKXa>b{> z*6x>Xh_j@FVlQchSX25}43heaUa`6O9T?$TVzhVws;(>-1!26HC3J(TE44t`9wuHE zWZ|fgCTs`w`byy}cnY@(^GVRY!ulI%PXc$K7Rr78PT&!LFYpw$U%>v?pojX*e}}p#zXN%EazFrY zBMEg(tU@8wH}MBb3o_I=K|ni?^IZj+A0n`PqG0A{3wC}P=-}52p`gPL=MRH6{tRf} zuY&IVKIrIQfv*0W&=YE){KNZ&$vh)2;p2c0sU@D}+lh~$y1-X{s+a}dHa~wWXyR}oT$(G?mbMB#q;tY#>4mUfN*7K_ittnl6Vs#`q9Sz>!{vX0|JWe* zfO;vD<*(vqIRN^g2my#k~Qv%XQ#Vk9NGBT<* zlf%_Pa%FWcXohylebsC7MD?S*Qq7YOtGIGkjR&`BGsO-3IjgNw%4%nnHrhL76lmL5 zY9{rhR#| zfH!}SWTOR8B8fvIv5LU1wnMvMqtLO~5_Bo(+4p1j(CgSA^gS@InV`%Tu$tg&?*>kz zM65Qx3TqFV^}+ak;9GuUD{wz(*=^vts{}l88$1OchI{e3ph@2j8uSamnZLjr5UF@4 z;L8Vt7jGg_l2`}|+Ks@GA0j3JbG;In@ZH1@;uLV>Hwhmw+Zyo;cyT8vYh|eG!jkc1 z7+HZ#0H=IqvME`YY)iHzJCj|=Ze)MZ){Y>1l8Iz5ayr?ETmXAkl7q-CFB6*vfLOvpAkgtIM`9#hqf0K*IOmZoiPcA0|) zRt24IJ?a42lsXKRV2*=s^c2~JI!pGT&Vz3EJlPjoFWBxzorL?2lFi}x`qU11+&Z!( zwS)|zW{@N`7M{O9RG4W`Cc``bOqL>Fl2-CQsSsC57jcICLmUS7e;0Y5*hoT60P+|y zm)uEAB-aqb$OS|XP;#{f4_Q63CsC1XMMRTzKrK*?;E71$FToP8AyMEmk&W*r-s1~F zulx_O57Y@u@iL%EHGn2HA2|6>cqphEcx)g33tI#(gJJj~tU0~{OTfniC*K+P`0C)I z492d30w0)pbOH7h*tOGWOYn}B!lr>wtRJW>nt%hdBxtT_bT68ZECL0_2=o@x3O$UJ z22}5XzsxvVTx_9&y2B}#i`oKjusri6iyNtNTkzr=#_RFqH34tcfw zU7jF6mb(JiT|?d`2g~!p={-jN4ZfxOQX~18R90Rm+2m=UqZueUq~_9hshspwvPf4X 
zQ9J@(rH#@zae?##>T2DFnp)>Uk8@P)1{$3X(mJsfsEV2b&)rZOBGv`(OiigVaNAYH zvQoSlF9nO?P!r20>A-RO1VVBG$DJk!;&-UA^&WWc*MdWQ1vR%`3z_1Z|F;}icR_QD zZ-oFHqllk?8~*~epS}vAP&X?cxbg~;1NhzmAekh*eh|EuIB}R%9_nz_5SPO1Zw0>n z2-Nzz1peNK(j4&vyw6m)8Wl>HfdhUfhR8p}O0olLM#W3|IH;4~>Nm9_Jx#MB2E7JSMemFBguZXKeiS*Vzd)Wrzvl#Pr4=cI z)AgQiO(}cF zjlC0Vi$B69;CbNDv4Ib!KK=(E4qlmccxmD~-U0fti3CY(A<7e1KsWOldb2`M{utrR zElGYP8k6}%U(x`5O=;*)nv=(%w$yFtao&^L$-m@z(hKLlfl4EzsQ~m69Muez{@uZC zJdEl@O`(QUOQ^ZjR%#n{lsZRU0oU;pDw+BUK7dRLp}mxiM(7f>iLOD1(5>h=x;Ne(Ca9Pnn-0(U7#{nW$G?vqRxPyVJG#STueP6$3ef@9s1S=pvO<329idq z3F#tBlJ7{GJWJ*gYsuG;gmRK-NiHK|$)Vt>Xh?h{LO{nnxIqp34C?~lRylked~;2)UsyPH9&@8hus3K=>=;@an}_<*9-t8_k8VYEiq}gTRkdR)45@v`wJo844Pn zYFck_W+rGK)nfIa`an%o*Qt%vzAC4dR#QQJb3u8fECdZsH)X0)3fz;b9HIOHRn0B= zfxJ`RAWwt3ex2oba7a2NQMxR>krqgMr4G^rDFW2&4$%%Z`kdk_@HzGucY}^_qNoWi z#E(L_cvvV9rU*}kmcl+E6x8c3zKie}JcGM=QkcSL@SXVwd`12MZ{ip7#ev~425Sf7 zvKsu+KrFv9!0}@tU!rZGAW$lh6wv)I1Hb)O0@wWe0vkbdKEXdO(8=F5P{m&}5af>t zh(08c29=ne`+oS(`JVZA_%8bw`wsah`8N26Lf%9--!y+SU!uRJZ;Zd3Z=^rYH{2iU z8|F9phWatzV87rU;4k*}^Siu#{JGxV{w!}_f4X;&|F3r#EJyk?yklYC1b?1)vcJ$f z8;)P*Cw!ayLGT)-;I(S`p8H$*lKee658ufobmp(mk z-?s+z?uS7Cej}jx-UmYb*@3El9rW{&{4jrAeh%o+clc-W*Ze#AZ~hy+&;OGThB0Z) zK%~$s&=}ILh6x7)%Y^5F(?UVuGmJfDp*d)NCW0<#AJo@-#b1Pcp%k$!^s)V+R^bL= zwDd^WA$f%tQfX0yk$WY1tvFP^CvK68#V2xt!iB1wcuxbtq#$R+Ah6=_C;T(nUHr{ zYs3uwTNiL~Y|_2R2fYT$B2&VqdpX((Aa7;6R^m__J2>@JGqJlX)SiA};M zgRk%ccn1qGj;MgQgxYy?iKCzq&%pCw>=j3j0hiJlVjc8V&&f(8N{u5MQYXmiR62Qv zilI^<`M^f+1eeWMYBp_#UZew^M6aM(!vk;(`RR#`sTrfeN{Dm#EZ#x7*Put!*p zea@DFDnwmOtZAmHlIeh{lj)ghyvb==ZL)C3O*OekraoK}pbS3KevaiHaV5A6t}aKI zyKyDWbWq|pTRCE)ah6&fWr^loW|aG6&NN*& zKQrw%pD@iguQUxek2bY8w=q>Wmo`P12@_(@Vsp3`>^tr>dzo9$?&K!1bGV-DK(031 zhzn<bjc`z_^^f*SKd!CUCu%+PIxxmW!4vsj zJ*4dgFVk4HBshw*RZKmheo#iLdz8}ZaK)iigr3|l&jbZ@6J@PT!x{WhE+g*-#q>Xr z15_T|&BfAW={oq3=1Y-MTTnR$Ne@6fzDYa<+U|*BU9qOfiIR{hJQr>XTZGlZP@$ht zS*R$;JR-c|fARZZ&Yj4w;v4ZpVXj@5{}ZtBXP|$cA9xk$7C0U#8CVnu_y-1F`RjsD 
zC?t^R_xhXpzxadw7yWtA58wAq@Ne^V@K5lS_qTz(pOU^jAL@JOOM_&-yO8y>!@Jx! z$ve^4$=k}=v&UQ2v&I|dnd8Mh z`voo2 zbFaA2^AIw2o)u%>H^r>?b8(ROXK|c2rMQwetGI!;ptzk^Ebi?!xQBbg-IKj#-Alc- z+&dw2=d8EA`?0r=`@46fJJ&nYEqYhG8Q)%aEadOh@;z~P^8Ip;^cA@0K_1;!U$Eyi zuo6#vjXf#8J|59G*%Rzv3pqR|JnjAWJtO_!J&XJWp8bB@d&eK?{o$|S_4qq_O@UF~ zvVkSu7J)d23(?rcwe3=7C^Pd7$rsQqJ&5bl+KWAw_N(8 z+>scyNNT7i0QS&JKBlgcf2wz6TFaN~Xz|KOt+%ob>bgGF9xHxLP|E35)dBh#b-jK_ zy{rFJow`+vK-y{TkcHYDt13I{+QSR zbz2|cX3~wtQw zY)n0-1k;nL&P-yOGi#Y1%n4>B^B9u#l9<(upV`e=*i%e-_9oMuea`e{ze2s%3}!Ct zX4bGO_>E}x1Z!b0vmxwlHi~`7mSCSi2H!KbH2ab*$-ZG@*!OH0`-!!)-{81kEXyXd z6r08(Yz8YbIjj$~q{Wb<#8A`yX?Y z-N~F`S2G9M*`U%I&8!Cx)N-~VGlwn9B(lNGP?iKwmEYKk%`(;lzf(o_u`!yxXk^)g zMuAyx%x306_Rx6aJ*KbmG}GL;lPL%FXwAlnjMvbI`E6*xJT^o#rwpobogvMbXn1Vw zWH@51Xjo)q4TFqMx`FW}9c?^D>xKn%I(Uj78|u&pAZKTe!Atcue5UFfE>e+(4U|MD zQora9)K$71wV9@=MDX#prC*S7^a;`r-~1bDFnN@!P0piCWN#{ys7l=>C~6y#OeTUq zw-vF1j3N3%mQGdj1GwIf<0-@({07k(-$0ZGSB8$&A(Am8aT`m)w?hhJBDMx^35ZoB zBo%ldrQt24Hv9(;k%i#1?TdE6s-Q6#ffk{EkSCCJvkhGZ*v4R_Cd?#BltF%j68REx zTVDm}+8{`fsEV|MI^|{bpO9X40kDhJ`W@{beTUXipQ456-Lyij26*#>A^jzwF4TUi z1GMXE9q`Tufp@kTytD5>&3{@A26aELj8K!5Ch8+4T>VceQr0WaVFs~Z>8s38>M327 zaHX=s%O>#67RYzuT69FdB(IgX!hB+yJV@>(x0dV3RpfB^6D3Se0v-N+(BB_|df>~X zOVSu=A7rhplB!8lrBLwCYJg8T#Cm{Tlmib@gmg|cf-YZzd524!2~OHE;5+Isz5yT7 z6YwP6fvedyv9fpx98c%PQ1P6|il;?H{7>`?$3&NKSo|v-6n_c_0FT%Q_{1L2>hBWo z2s_0~u>23U_X&H&t;T-YwI61IsOgdL#c-wXF2h55}{@tAN!JSjW|jsJV` z8tD7)gVz5g;2~cG1h5T@lm{4xAMgznuni9CjYo)Mpy?_2Thdv6s*C}|f2k}e2V|RiQ!WpbpfOf?$BC-18$J|Oq;GIX&Y3pc2Z^Z zr;sctXmVpENpgvo>1;41z`ZZ7$f6;Knt;HZVy&h6k?}2oLJkpWy zYn%ffsSWyVz&}1Am-KApiOwTGbQ;`xp(q8YTs%?-`s?;khkGzO1DT3$MOH&C?t_5b zT}O+N_b7+vqUE5>eG5o~8irPfc|=?65IO{Kk=fV}bSqYfp2u+P1r~uN0YaFMwZ{}} zIL3m5Jqq87Rl-k$kLoej88G01cri8}r}23(JKqHUsbk;N!ysFz#q@YFXevL5c0)Qdmc1;!BAi_#F6jCK8YF5yTC=FL4g< zNF2c%6FczA#47mI<^Y~M4pLwG3b?vL#q>e=2S}T_1)i(p z;J4ZesQp4L6&nNJU>EEX_^S3`(bzKhMia3D$O8L;HUlqJCCD3yz;*$~yA<`H6VYUJ 
zAo>FB01m5$=x)GHmI7+_FJ!9p1q!B^E2VIbkdtCvQSbq;x{`}M1k1aeq^s;}2i>$4y!WVk*R(nWeg8cGd49@0mQ zdV%H!{P%};6*9E;gL7)7wm_StjR4%G3wWj)Y9+LCnh6rJWJn)ys+sB!^#kBA_toR- zd3BAtU!4NC1JxPel^PDo1YH3cZUWeFCDo%us+o|3^+OTC8I`ZxQ!SWa zyam+esWM)<4L$C8r41w%)KNAn<&*_Vlrl*%0g^1sgA|9{Tlpb(P+rK5lsj@|<-8oL z9F}d$R+u3ylQnsk9FYH&i{w#qjyzaSgMRrJBpQ5?Tge~frt%xe#(FI`lwZjWU|Aow z>p`mz*;x(b_i`gRt`QvHNd5~g2aa{f&7rk|^>%VDJdaE6BzpiolI3nPru2s7gdwt7 z87GG-)8TzAl*=nC<=VtdVl_Rx5AdRm#N-c}Z>Z=tVC zRStsR^McC2Cm*kV1Oz@^ZKwJmkBrgAtMS@=xXNu(J8H+(q1tVAn)X&*rKLdXYLR+T z!?Y(_g!UbrZ+Ti9NTcbm*#Orqqp#2!=ts4#`eVpk{;SQ?HEp{dsbAJ>Lpnt_NRgSK zv&ahgB^}TkBiHm^$Xk5^lBF+0RDCZJ224W@$cpKPWFV6e5nO&&@P?L0U%@Xm9T|uU zkYHy+*F%2HambFm3;w__kRRy;JP^muq0x}nR2$fi&X^A!0Ykl6SUh+VYhnAbwvZ+{ z01{&+VxO_4SSGd$^Fp2x0en{ooMC0~4DcESAmiDD_X5AsKaj;V8E=FyhO=@r-Wwc| zBf)(%9nQ{WkfpQnat>4#EZOy5!D6cF|rdmgDeE5XbdtRzQd77 zeeh<+K}H}3Ie=L@0+@ata7CXWUFe4X4)Gui|r0V^FB)ywj zcWoz(x@N(sYmnwwn`z(GQrb0@*0w-zF$wyLPU>Z~qPkr*sx#GGrH}edsiPiJLgBX_ zQ2Hr9l-kM-C0N-3=g3Uuo7_*i4(IxAxdb@SNo6P`l{AO*H9@`z{>bg}Uuhm>+l>I8 zsDr#hswR(>Lgh}9EY* zRTf1t2ryp-GT3s#$M{{iEj|-YiC19kbO?Nl>xB7`V>bmj>``J@p|99PXfIY38jA5k zRnab#5(yy+a^37=F>e%|JSt}Ml8_3KBj0%+WZ8LzXMC}6pD%zs6Nhk)&k&CBe}%n# zim;te61MQag^m0VVLi09{4Zf0pA6e+!d5U&9Jdbg z<&Jru#if2}mtJj!S-27K!6)vNy>j~!A;{f$~h52$_h(%KTe zk9JmHrTqk7A_jBeDo}uM0F3%q0mps=Iilx4KNO4x^k!%YWIWmlvg2kU571+f2mcu{ z!%U(ERv+zyC87(lJ?L5NF`9(speCGyJh*z;P<#xw6yFUP?Q856EZi*KMLpZC-7nG#CW0-u)za>Azn>d$XmcC<&o3KC}5jALt@k-NUFU+wS%OnxfDg8 zqiWD8)G(R@_hKWM6OX5z^d8!7cnwKS5Z&u|uQzZfnXL_>-p-bfqU7^}kU zw=d+zFE(Cdku9WNMFV+2!o~?7X5PY#*TZBo;Vi~BhB@p=Kyufz zL)a_qBK8k^h(%2g0LM*c+nWS-f+@tb-c;Rm7O>pckSXsl%{S5Dbu0rep;mzB4&i>9 zW^pdlCQb#{5XU_P_1F)tBA3tA=P+|yF2dZCt7aa;wF7*2uz4mo*}RNfY~I3cG9Ta$ znNM<;%@?_6<{OYdf1k?&%(vM5nUl=P9B#?sOqK#J*y84*EIuyY65vWft6=eS)c}vJ zXDQ^GSn{|QfXlYFq;u^o$y_H;ly|p$<9b;>aD6OqxW1N`TyM*BuBYWG?0?90vE1g` zTP|}gE&p+iz>`_avY9JyS;0kvN0YTAa)NmPmu+s%eKl9&9-Hm(-UQPjbD9a5K=U8Y{=Z3K@ zxRz`Mt}JWiSk`MQWWJg{Fjq_$m`#w^JHeF5v@x|~5=;q<%F4!H>{sJu_Ka~gV7Wus 
zfyUZwO`{30+)Ti7Z!^~nn*hrl4OnhNrZVK?YV=>@C&+L=MlUeVq&phh(h*oTTm(Q>fKM1F9E+QSrnp zvH;&e-o<;8tMN#(7oG`N?m5BfNtOlWLHf^1-&|Y4=OAy(a$1n^>KjVmPI~m+4^>1fBS30^f)a>&r*MDr`4U> zziLmds%nF?_@jD5*$8KL7qyjQQCa1a@>$*rJ$FxK7`WRj$p7Q$ETGzGyDc2Ii37pi zwODD3O9d^|m3pbWesy5tzJX!35tLrf#j^|8Yyp1<#L@;NOu5E>Rlbta3H;fUa`XxNBO}?a-;cuAZZw zp>_u)=Af!U^%~sSH>)P9VpL2OK~<@&Ql3;^Q>G|$lo(}*GGAd*Y*myfMk+2UwDLL7 zi}hCQluPAt@&Y+penS3BHd}sB<}Xi{>11?Sk*r>NMRrrVOqL^!mJO3KWm-v(^ta@r z^o-<~bgpEsG*B`^suQ!M|HLB6MRA#AvG|!}r1*%0E?y+*6D3IUMNX2_BBOYYh$r?J zeHZJ5_r#^bed0U9dEza?7;%QsPV6QGS-7BCR3Ru3Jrmp(9TV&mtrRR4We8G5fr3yG zUEm;+@yQ}CUnTs>?-f4h*9$N5{|fi=zY14FPRVTkBVh_;mJH`#g$$DmLK^=xY!-?ncZGJ+a^X0sUbso>C%Pw{C90Gj z7a65_q97SxG+*W@z9^d@E|MJ*>%o6MNUjtwlm|(!$d^ef<&PvpMUNy-;U@i0F<1Id zaZ@T(v`BrG4ze6&w(NoOrmRQV3R+KV`C?V3{DEq}yj%4_?yMe=PgA=nj;rS?^3)F% zLqOEqX`+>xnq$i28m_WjEA(j4 z=4WZqhFjVUgFt)P5Dluf<2nyxlWwWeTmRCyQ72v8O`WbpCqkx{n7@*%^uo+d1NCuWUpAo@4%*bKBWL#p_GJY|&3<1-L zX~9ZhMzZEJ=L2hen01Z$lJ%Zh!zyQLSba=KwuzYtTyYLNn01Vu!g|S`&#Gc?V#(M? zSZs^yzzXNGGA)W&D=pes$1D`ACl+}2PYX7?*20n9ZsElqvatV8k=es1SPAT8%Lq8m7mjyeCs;Ds zah7;?tfihc%2LV-wd`a0Shlj9EUQ>-%R(03@*PWMahKI+0m&m4TUiAb^I4BAQdp-f zf>|3aY*^DR%*<$uKBlupF%xa^jM>dT!u-u%1UV`3%-w7U<_xx$5z4LuP5%o*&*@GS75 ztz_t_qZn0G9OEIilD>s{73P=o>1?Vmorg$hkCBhGwa5lYV3hWRv`ax#WM8%|NV&lVoHO@eTPgaV>csaTwWwC?xR+ z&q;R)%Sbr{ACe!TpD4mVAilxRA#TE36G!4J2wL1B!e_`H*@TnB{P7W7v!>&pV#)Z$ z*snNX8gVTcN8CkBIrysY#FC+Z@(azw>_DFZ@5Rw@ol-#x!4vd+@IiM#QP9oieAE^5 zTGUjyKC#X1kYWI|KUA<|!Q}`KF0Fqc`QR{AAIZiRBguFk99lArN1y|d0*o5ju*XmW zmBk6Y8tAjP;CHwZoL0Q`k9A!zSG)~QDzkMcosF(Y+n_z8JrCX3Ebx|MXvLZe@UA|o zIi|_bOx4hV_pMU%)#ub7)sxiw)mG{WYOcyveM2QtEmVC|`KwNumdk=9TV$go2W6ur=Vh6ayRw;*SF#+)BiSS=mmPv^kLwaCWLV+lexJ99fHE zr%bQ7DYIApltn8CWOI~kP(#JYpFqx4mGZP)tNbYUQ1RrMDx6}g%3pC;HAPVZEQ?b0 zT;Z&4P^79+%8lwk<#lzovQT|iIi&ulw9uH8@tO$Ldd*7JW6fO^PtyP@ECg8R1oaN> zCUvg%vARdwthUk7HEA#-+@aeJ*&S~+Ezl#U>Rq%6`YGB?`s3Os`k&fHy;e&!gz82a zR_fLmp6DJ!f4j}#uD3ES(x(|8>i0q)`>WBzpn%DDATTL8rgpM$QOS)qzeX(*z3FDeOYpj~D@OhlZ~YV#!YaOf*t4UDiq?gn-)t`U0y`WgSAQ>w%5hk7Ru 
zHwQ1nodMeSCuDQT@K^Az;E3E(+POa`)T3-JN*39*~l z4BoQ{q;5o$E|XT0svsYWNG^pdj25ytyvXMS z*-y)1HqmA>i)rJTpJ_?V$Fz~mOEiDxL7EeDBaOveOv5s#(BzCUv@YOSDj6=cZww0U zHA4>i6dq((l~NBfzEIaQo=~$H7pTdMUDQyK{6l`j{?8j?=4< zHIQdDfqo7Nqi;m4=+hAuEgGq(IV10AXyg>Fld_!lgEEeGo8n2^Nx{-)P`aoQlCND;{sxkJ)WwvdV`8KkQeH^>lC z5=T;shy==YVl8hY(JdWQ_ zoQ_W?y5p^gVq81SCLa?H<5m+U;=%~_I2FDJ`xXBZyBog+JQ(A#Y&;3vIjb=ba5pi_ za4Ru^xEKt$HG&^-19~g=CVCWh5n6)@M1RCU#}ZSB3dNj&x@aBhPvoAbTZbP;*BG~ZOLq`Hxxs+Y^%IwFr2_P*tJQtURPFQ>H0zDA~$2%4S89^1i}Kxk}Nm7@_#Az$lI@%AsDmEDu#I zkP{U_a-Li-doM4NoseIYEtD^iMFXS4k?Vmq`6n%rU64MIEs*Y#g-U10aG)%0kkDmM zB!ki&l0VW3l6z8b$zG{mJYQNVPLMtnyGeJ6anc#$en~i}PU+%Tl77)S$q&(H$qmtL z$$z3m$#jvgBvQndI6*Ci7xBd+VY8SkEEWF}7Kon6>+=Zp15D|N-P%S!Dkf1=QW9G!XdGZ&RQiC6|TgCC`K}B;SR9C3V6s$&k=2F$*1}mZA|-AJIf< zv}l!dyy%E@p6I@GgQ!4yMARU?EfPvUi}12qk+Vz;o;5V_T$#UkuPjx3SGH38O?FD$ zEPEqXLuQt>TqYhRXG!MDLnH^}6D3dOn4#|Tk}*?_jh-bY7cE6|I9?>vQdg|kK`rV+ab*qS#OD{vT(z%e0PG7h^3 zw;KBZcMJOuSB4#e*#QmjiVMZ3fqVHT+!p*@+(npbzQwC?RiLJk;FIxe!g_odC{`vD z{^HjIt$m7M3DovT!au@nLLXrd0S_+oZp1P|EKxz2PP8Yk2hQds5adsYdx(X=ne&Lh zz+JwV$Rv?S-oS*$lQN;hxRJDubPb$=zmx8hx<~~iEV+r~Mplwy$xP@o`hY8N9FU@u z$+w~J_!+v6)#MB0A@U0c z&eE4wj)c-`kSJOmGKSWGWYQX;PuGMjqO~AvXgpvqJCHrJZsZ`X4>?R5LXOZx$WfXM zdU`VCI8B0_gnegeJ-}(UBiG>go3QTE+K|WazSp!)sD;{*JX$MK@V~`FzQObFaLf-H z7gja&2#evEUq~)3A9+FhfIOwWg!eu~?$d4{H)!XP3$&x~HFm?--Gb}|7IZ6ZDY6Rs zhzn>_k!iFvB!d_x30+fWP0 z2=x`&gxn!ZkhA1=WG}f4Sxf$c%q2fY#*;50vCyLoAa6r#$tw^7c_yMFB_n;LFr17w0@5W)I%zv4f;5-nOiG{-A>&I#L{n;so#ao%pX6)A zd*toJedO81`Q&I~Jk)_MWIX{*ZYK1Sz7qbCt`nY+b`TDe<`5Q>5(!BpFM=D1LO_$m z_)cOiWK@2_-zQ$hA0YmRhlB%sA~6E*LbS#kp;l}rwBkMzzT?gj?%`Gu4nm&S66kCs z1ApX$>&6pt-|_v}tN0@92K*CjI^|i`|OjVWt9mGzxba=(nvH zEN&*I3mb*`244SHu{!j6EEk=I{e*VIUWDuZ26Q!MB9Iot&_^L}@-6>C^2`%Z z=p93S`JZYp8I=PGHPL9TnT@VC4}!w)2kMadDr%v5Jt`g?6CKSikb9{zb4}&uk8odb z-n7TO#xxV|+{4YjCOWv0_nCT(-%N$Z^QODT9Mc}-DAPP6Vu~|%KvvWzqY>^CS`AB$ zdB7rFh2*GB1|et}{s9krA8OW}hC!Hsm%!xvIZVS&z=V6b{tHZ}&jTB~LXQIqsYmCm zFN6u$1E7x%L63F0?l`y*FVGFsMS^O|3i2}LkmFPhJkcxdWpF*-uFU~A1YEVH| 
z)hP^+y(v`YD%zEI6*bCppjp_j_@dmX$W<;;JXU6dmSHO7bBQjSp^Rz@ifKyvn8 zr5ETYoE6)Z_KGb^8^uPY6=*3e;E@T>X|SIRuM-seK}T^|X_OyR>g2~kA8|sdm7jv` zGqCTx60NubdWoA#lHxvSE}kh_ig)l?1xi=NU!||2LK&jqLJnxRG7849#&x`F}D zRSv3E3SZS`#c-%S$Ec1&PQX>g67cxnqWY>hpek3KRdqpas!_aG(UqktZ)J}vPH9lh zP+F)rK^pjJdJ8=bNW-f!Sgd%n+x->~Mp67E2Qhz zX+l6}F-iASvqAS;b57Tx$<-M(W#CcTuZz=S^z*e2`hD7w`g__bpjX=fTD5a}sxDU_ z0?O;DI)Q$dj%s+M8)m4|O*E)=n+%TntA==efnh$(1NXr^@IJ6xf1vIi(62Po3@44D zpzNMuC^zmkh`^(iVX83%o0Ntula+BDkl_~~YwDwEn~`g}Zd94R8ZFFC;8&`J9;%&b zr8yEjOJ_np^cGVgFk0>Acc9CyHE|$oDg^Yn>2R066uLtPVODq_Iz_)wdFCz@4>Y+b zln2@!H3mHf=)xtay+9e>gjBsR=pU#CNcNCJ`W^%0g!TnZM8@+2a;+G4vU`(THG9*9o$QJf=Vj{sHa%`9b5|j4Q?{N5SI<))M9);B;TlUkYo+I zS_}LcxX-u+b^LRDIQ|2ub_?*S_&=c1EeADhJt%ZrpuX?K=iqzrtMR?~Es(ys2i7rs zKWOC!;GIMG$9N%VcV+lIyawKhB2<7vw-NMnouD8a!XpGZo&h-qR?r);13uUZRA(-5 z4+9281P_8I{0Sm>LyC?cAqq5U34{>BSV9=+>>@yMHA)E!x@ovZjT8ZaD4rwvy zgWrKZ7@SE#zs$g|#cOc0@SVV<6#;wp8aT6yI0HD@cH`y)*_I6CTM)R7vw)3LU>D)I z*s-{LtT+5d384M$$1VigBN6gXov}yYU%_H*3nmeCzpn5v1OxP5AGm6lVj?iFF;_p9ld##xdZgRe}VOS0a=O{Aa81? zS!!BrZZeHG|1d?EADNuYr%XiiHprY=0@;d_O@-i*{TeiPSHUBDzlmvD2~OG5OcG;~ zsofY18V+Yuo{?^PVKf5wB`}^ewik^UpFVYzwV=d@k}wYmWkbIhPtMnMISGHizDn_&M2z4Cu+BXpHFfyQ|Y?B5E){v0q56NgTkfb)=qy|MN1MZHz%zJ@7yJ$)? 
zzc9@*7s7eH$+R1kpXbd+(-UA43(W53TF|EtfY-YLcts}a3~*?#%+bJpOhxspgBl55)l<-F)CROIA-0%>KBrGv&b2D=_I$!-Ek_7}3qByje&2OcB^$kMI2OSp&7 zEv<%bsR1}vA2@$b!QY1SZ6iJxPXlFf66A+%h4a%Z!bU<5;UU2mRD9_~Ghq+p7Jnel zAo3xz*p2j=IFr;zJWsM9m6BqiMq5T2Nxno{M*dB@LY9;Mkli4kIGyZBSx-);TqLij zd?sI^G?5D^I&u#v`>04HWjHdOG8Z%khmfn3XGk9C585bl#6+>7+99#jFsQfEkfYRv z$Sdkjq=tGGQByx6cC=d1r%6DkMx}11LHaN)f%=d(gIWMiLUpvgQ~~WQ6-~cQwWPnG zdeXm8!|8vhN%TtU1bPECo8Am81&_KG*ov*-m%WePM?Fp-pkAO4Qg6Vz1M4n*2p)&1 zH(~o4Xy`7{8{zdL>RIsEK0$vD9@}@|z2~7Pdw@ERz5%$5xzus=RB8l$IMt2rN~J*$ zQcqJMLr|M>X}^&_ps#yPJB3`OZA1>xW(QHgN@+PZ&!sBsi0=5fr3#gd$Q3;ReZ# zuz_U2k0&+a1HgBOOge?{B`(8%BaXpeBD&z$5RJGLVl&Q@_z8z4Uchw_*5dLBX}Bu{ zZ`?)#9+ySv#rhNeU`d2~*na$O>>vCr>>d0_$c?cDmcxh%#CKvS_+OYI+#O6gZYSmq zZZ_r|E*`TE=ZzVUV`2huYBU1QR6^`GbOrV<`W5y7Oo^AGmt!-~$=EQo57rhDR%wN<4%rn$J%xTnI%tokhra`VuB+S+9QFV}j^bXx-K8OBkUXOkVlj!5-AoN;L z9!&)JqF@IsrR+XB7N1n7m5p>CSzEti4N+eF{tTZc{6h9Q_6nhlo6$uIi zx}QbzKk`HJqw+*~mfR$>ljq6$W!q#QWRbFcGQD)X?1$7&c1YSU&5(YQT1bycn#OyMGtLNH#`A{Zt5A@CGE5Lk$g3UHz|0=004 zKp;#MbO-|k^+JxITxjP16Atlz2^;u%!r%ON!uR}V!iW5Oz>VA#9^hXSZU9zfA*^Yz zJ)VC9e8X=Fqktg^hhv5TPZGp`3Xd;@A^f+(P~c2L;q$_PI|=7^3rFzf!bm<=6w9{| zjpn>t zviK|CN&J@|BCg|)5%=AMhl%5r4NOOg2q?N++QlapZlp<=DdW-bZM3KF0 zt|(fzTa+!kBibb^6y1{bioVN;;!c^5m>^FR`^(pgv*ee>8|B}{SLD6oJULm?E)SI8 z6j>5K#U@FX;+kZ$;+y2UqE+%;p^|heXi~J&L+Y%Il}0INN~b8dN>?c_NDnGsOK&Pm zrC*eT(ncj&rc(OJELCZ;VATp)hU%DXx$24Ri0Y5*k*ZTxs4~eqRCaQMYJ}WcJy{;2 zUM){iACb>f-;r-sf03V3SIHl$`{em*qr6USsSv0G6*x_@!dA0b5un+xh|}CtOw#;R ztk85Q4rPuJYkpVE}-OEg+NMe7Hi z=5q{}w3iH>+FC<^&ce7&H{DpGyJd9L4}fd_a8tGZfGN;W3;IQO^Ppjqd7QD_{K)8s zBAJfEnSc+JSPDAI{0RNjjKkQXa$tH=j6tJ=u-nj=unIH=w-#tLF{EIv!_L5Ku;uuD zxFk@OeFdkU;Y52#&b&v6Ao&u%klqoa$uXon@*mPDN*ehSrJ6hfnMU~x)4&MoJmd|v z2ReZ{)LXQEss%lVc9`Br)6$pHS1<&i24BZWVX7HlnA@4YEEDq@Yd4F+MzeRY53ogS zyv00={T4MAW{V`tt(I>s#g!`VHCjom#&9NcUU5EfEUcN<8?BdG*I4IS2ij0< zj@V4K>9M(G6KC52{EwJBf_sl-mPHfM!3wEH` zuW%S{f7c<~zSd!%J=XEQeW2q{`-zUN_8S~!_U9aN4!MqWhcZV?haN|32aThh1LEZD z;Naxy5a{IM5b0#+km$s480Um@NOe-%r#TJ4YOqgr`ei@X>4klq(=oEF+U 
zIgPbvJ9*iI0LEV7*kjk>_{*-?@wVL?$8C0394FcBbo8^E>u9o#cdWK`alB`XcHCgw z>5ydm)4{^_jzhQ24u^L((;W8NggRu{&>U=RdhNTd^Xy+)pSIs-y}*8qb%Z_58nLhC zblP3#e6m~2IcXQdS!idnin1%Qvaq{g#kbA2`fclDb<;u+m z?A6wx?3vc~>~Ypu_6X}CmalaU%icPVMYVpyGIFl4gq-87PR=eCm$Q~t%bCZj;!I$b zbH=dBII*xs!5YCT;e@mPaza>zoFLYBP5|pG$A|Tvr-abZ2;*t2eNtXY>hY}QE* zjkS+MWNqe{m@7DP=0Z*%a~7wOIhj+;Oy_)Kj^Vs#MsuDq!#FpY-kdW`Th2ZvfwP$@ zwpz-pvzp5MWR<|YX64J=X2oDmvyw4}S=BNyR<9X#mPZ&5EVCKwEQ1+QmJmX1SxGOm zxJJKXv53Cd!k->&A*QL>?`Z|>ZL|aI1X?mr>#gGbpwAZgLKO1zC%ekxt_NBe~+t#JAW(#5gRKSc*AIn2xa`45F{#x1sIv z2>KT88p;*tjRKDfvkNxHd;?Qtvc^m_okzDCS?C<&8Srvvpf(#$no$N4I8yI5ar9c4 z(yTLj>G+11+8jfKwp0H@GgF_gsnIp6Q*;Z|-?ai&7@UNB^cQ-$rfpWm@i2a&z01RYJspEEAAD3 z6s;0^i8O+X!h-^wkSW+8xWez@yYMITpAP*V3Kkl;y&b(ay-Rx!_egux-P?PXcH?@QyH0jzbaA?Wbl&L-?+oaA(($#^ zsUx-Xe0ysL(!Q?aU>m6e-FCBmCoi;J*IL}RxpjV9d~kzs!`dzxAAH-yD_f$QA2-Ibi=u(>iYPmCG`rfvF-);V%-vMaGeLY zqPC-PP3_G_%i7tEc{Q$$i)w}%=rtc2@~ihXtgfEW;8pF`&{3tTzgAUNKc(tHJ+o?O zeOcwS`eT*D>(eW3>e-d%y4H$;x+fJibz3U(>&8_)sk5uNR3|AvQdd&Gv+im6#<~;b zE9y3t=hV$BUs9J|zPc`|d|O=ryzWwdv5r%IuZ~*&t`1XPRHrWMtdo@K>x5;TdQn+O zy|gT&URk!HUSD>s9#{Ufo>^W}?@-RK_b#V5gqDXk#FbBN$SU97kW+rQVQ2ZjhKuFm zhS%jbjV0xYjeX^-8Zi}@8=WeCHIAwfH%_i_;jXKg!aY@Socpxm8~1kwpW9Jk-DIjr zY_hLh-88cDM$@Fq;-(dq%BBOAF3mS8lbb(Qu5PZbJl8C!{MbyX;x+qK5n9r!LRwZ= zWw)HGI^6QL>TOF;ReKAgn%NpzJ-T&q^?$8rs&iWls{31qs-1W?HB)%0HK%xcYD#$d zHN-Y!&Dgf2+JkK;Ym3@iYnkoAb#vQ~*1d1=x%fbnBtJg{7b8d@Z`;LlU| z3Fa%u2y;}6MXS}v#oINXCC9XU=`Edy{Ih<6qS5eBsW%E#?&fgKc+^fUWNqu7V(bh} z*cCBkZPylitxUkOvr2iX$tPI)QzccF;mg z|7#5^NS)uCV=Lb;*um{lk8yO~1nvTZ)sy&c`{%A<1Q*<4o6IPOIEd zE<4;KT@QM!b35kw!2PIKwZ}eht>+dWNAIP+F+NlLX86YVuk~{YIN)y%I3Lgycqi~> z(DR@j!`=)V9sD+!6!I#hEaZOZiO>sSsbTxV(cvqHzX{Kbm^s{U1hjZZ+>9t0Iby{5 zQKchhMNS{-64f!XBWls8YtcQUGGgXO8e+JSPh(S}CdPe=QpCALpNT&h?VZpcotKal zlaY8erY%trvpgv(R+4licJ1ieSpI0+xcOsd#np_t5*ItRI_|+(LOgz4MErtr3*x_y zI~LDKei=VMxhy_6c`)9Xj7u1kVwZ3{#Xq4uWkdo!H6bBAH6`I->V$-^sWTFWQs*XE zr_D=ypD06a9)T`u@$mz+8B2DA8qwb75GAe!?dsNHV%Of|8 zwHrwsdvnB=!+$2?aVA7KCM~Mw#_K8Vh#}l4~s^aOP+41W_ 
zzQYHYt@3Y^o)jQ8t=OweT@$#{W@XWTJ>T#B{$^DPjIX9i< zbJrlt-!6+RTAZ)2*Zj-Ihc9Y%`HsYkdk4a7rme z4vFG!l}wJegvp}CAL2GPj(CYRn(&Hw2>*vsjN{R%I3+C&i_i{XoT%T>!w~~2of3_j zN8WB;NBU^mPm~xh68wx$@rw)vxLf*KELS&(0Y5-=s@58PNE3wmr5aG>>WNZc1ymYMRiL%ALyH*Ep;3Tf_VY zef{$Kk@Z{ZHrE}l{ZM7UOynZG}Nt1ZNRn^`!mp!?UW{FT3ueC5v-d3%3Wd}aL%`}*+5wJ$M0h+nwh zw|(C7UG~Z0`>IdBz6n3B{uE} zdl#Qy_trXpA6V!_ylu#nzWJ8-`puKPwQnxwg}>RKXUg4_SCzXW?`iJhyaT!O^A^Aw zox3#8FL!gEW$uwYZ0_|u_3PX`+3Vsw>FYjtB<5>h56j2p&dRsSJ(};A`z3!&t}uUb zu6M!V+(iY?a~~Geb3?v`zuEq6?VF-+Z{FB_SHD^NJ@#$U_akpTe>A*3{KNa5 z_{WZS**_cKRsJ0DKIzx3_g{aJKZF%-`H)*U@FDE??2n&+*M3a+GyYTQpR!NW|E7QL z|6B2S>%U1~kfOFPcZ*hj9a(JnT2*`|Z+VGVKB@Fu{*%(_1u12!0)E+*Z>P$mzlT=z zes8Tf`{Q_J+|TGL=}%$RgI_nRXBLjHVgJU}mi{iNJ@{u!UE<%+dh9=8{l9{M(tzAp%A9RmxSk>dvIJ_6jHTQOLOZp0%uJ%7{UOsTFC3zNu|(1HNGj|-ARFpiBp>QeR`3S`l)^!lN;;%c z>G-W`g0N7-5xvlQi7)AfOZMu=O4k}@%H|o@%O{$SDH6^1lwqhZDo=Ec+7=_wP_bkk z3g@F&;8F}j_*KRp!g*5%@r${Y)QRGfshB!SB(?@whJ!99zK&K*Xr$|j&5U7W9&<6J zoplrGV%JgoEfBiMGM=Hd+Q>9;9<$IkO>Dd^)sk!%YeltR$6+`;ux2|p+E_SI>@1w4 z?b$A?92l;*9BFQ4kh-CFrg(U};yfq1={&c&3%u@nw0Zybtn?Z1`tD2ddEpn}d&xiD zZ)d=A{{?|Z0>%d25A+_EAA}FC8P*Zp6Z|Da9&#qs5V|l76&@0f8m`8f?7@qnw@jz;0VomB$qEnhV zacLSSDK{-BNs~4@DK>pt(t-4)N&nLSOCn|*OB$1LBWYL0%cPGPc}e1oze(QX%ai7e zuT45XzB#FId}oqm{6LaJrZ6cvQ=GIuQ<`)wQ=U`+s~PqwGP{#lS=^-Htg@uEtnW$l zv)(3c$hx1jC+l3&v8-K5=dyB=E@!1DUCRoFWdYmD#8X*yiF>kMC$7vom^dkGdg6#I zzeKAnO@cV{d&2L`JqhPCqZ4Ll%Ho|e@5MKdPme!0UKTfY{KYtB#_+hC8C9_{8H-~Z z(-kqR(s#v})37ne(+);6(ooSSQa4B8Qu`wROPL%wn4CXqLbBti+;Lk+lE+nzm_62g z#IrH$B2=Tl438bH58szGBK&*ex-fadlTg=$){xYABxGG&MDWGf>|yU?b_W$lUkhxE z$_?m@{Ovz9s_B2%N!bXck1#^#-8Ee2)fle#ED6(ld<|8*KMoPQoel1F-7>7jWoA&d zb9CTeC;Naej#|HG4voIo>_7M%vpeU##de+70-G$)4C`SYqc}8oAFCl(TgyK#REvAg zDE1yFIdhI}Qhlmnas@?5JAq-e`mL~DyD1PSXtzJz%X z_n2`Hdx-u3vy}E2okD$v@da1B~}{#5_tNzc!BO7&ZND7WowUM{4_h! 
zW7Hc_3sfu2dzA}KcNH^@zvYt*gR(3=LzbxlocSiInI~Mm&?6}mE+g{wwYDaZ%ZX4Cr&fC6s}`)P2-ZL%*K!0@&+8L)GT0!0HoKO;xT{C#v!*$5v%mf+aw-EVVk%|jc9jL?rizp0{EGSIT@}O2+bT%qyo$E6_KGiMeHB;BBo*7r zj1{v%02NbaU+GllSBWi)t{f`OtgIAb3l(!Eu|rFW|QO21XPm9|vbmujjkORcMs(&5$U(uviol9ko` zl7rQqB{!-YN?uo&l>DsDE2*x2R?<;@r9@nPs6=1Av4l`Fzl2efUc#vvS>jmZS>jp4 zF7dB17l+gci$~P77RS_-6_2j@Ry@AuZSnM)d&LWCP8Bby*G8mFS)H9jA(R_jyDs+(1uTz95;e_c^=VI8^zUmsVJQopa{ zbbU!lW4(2$XT#Fctqu956%AHpK8+j84mCEG^)*J7kK^7gf5oL%I5lmnxX?6EL2jN{ zxwZLErJy;vYIV!+Dt=3Bbx!M_YEf%e%~oD>4Z1C-_GFu~)}j4a-J5pz`mr4a^(`GU z8rFB}8mL`28lQ9}aL08Ia0j~2H=XE-YYy!dH8=O(ZaLVO*&5PsYUTC6;vF5B+ctdA zqP=UdwEfJ`zK%G4WT%KP>AWd;)HPl>s~aO?_Iwom?O7_`)oU#o*;go$_pOmW@3)uD zANVV?8QdVR8FW^h9x7L4^0z5z0#8+mph|UExLutj^3tHhRhm5UHtlAKn=V9JsuM~# z=%2`J3_0>|26x3`V~2ujx~_aFlv9^?6u# z!%Q64pvIjxp2v?d1roHTKZFl&4-dVBOA)MFj*PJv9KkF`wd)8Yl?QDovr)|zzp>1tB zYi+M{nr&^Z~D(?B_WCvft=vv_I?^?r_0ziNh_&YYtBw%N^c2njG>R104$;XFHZT9(AmB zeDB!c$a8FTBseuYhB|dP&T;B@JmMs9eB~r|taVa2YMpeBP9SeibVfNXb~Za6gU46S zIHwwCs*}pu(#gif-YLq(#c7_4m(vlKK&M=n5l#&*NlrSKOeYuD*-mM$E1b5uZg+a@ zdeo`f^^%j`^%3NQeQ}!QR^fEYt=s8`o7PF?#&QmJ4{~1Pp6UGBeXX;^{k(IyN51oR zk0Ixu9`-H-&kUCg&pj?zJm0(Ycn-J(dfB^f^_t-N$LoYE$GgNe#~bJN(L2_S?DL=7 ze4o#5c|KYLkBs>h3cfcQb?He5PHeBg{C%o1FV0fPY;_!?9 z3E?aKt;1vd`@@L-xnU)Ko5GIzjR+g)E z9|s@v9v$rI-8}4z*Ro+rUh<$~&#gh3p1QyWj}3v7Jq7|w-4_Lnb?5qjc8m9Sc6;S_ z($&UK<+8(fhD)nYzH_9Hv-3so-A)3p7RN-d2*<0QXB?y+eESrSB>S80m+eGuL$jd zjRUjTdJaR+xlIq?bkee|TxrKGv#I$O*AOAQiQ>$*rDU?Ek#{lAl3p^3i7j+2kwQ-( zMA7!(*HC}ro*@L>03{XcLpg(4LFS_0l6=s5;!f0PVuSevVVJoIf5J2jFEk#-Wf}Xi zpA2bO55p(SHN6LhsXvWAsxzYy-8R&9t=1f(U1EN(={JqhY%%q!HO4LK9Y&U#V*I4K zVwk7$HPBS~`T}K^evPtM=b_xDYgSn5&MQ7@Cn&PDbOlLUCjY28CSRe+lzVF&<^5{0 z?6JB~wp@Kp7OdVV)2SxO{-`2k=Tr`|`6|3DR3(-YRISn$WtsG~GGBT?`9eBNc>~l4 zC#5vyZfU<_jr6->j`WHmQ@U9(8q|C-(l|wg)LjuI#VI_cgK}4CmE1}CL2f6#F1M8K zkt5Q@a-4LW+$iypt0W}3OagjH$!}S=Q! 
zC5@9rOWh?9#w4kbbcruZeu(EuZi~Gn+r)k13F3QVAMqS9T5Kb36qP{Q*%r|Tk((%5 zR3*fVb_z>{UczI-Qo$JEY5__}7km-C;I9=V^W6l!L$&<>1vN@8R~| z=>D%ayxXC-s_R$Js;=cdv@TB1o6cX|(>vF8qdFbCpLdjXP3_p(h3yFIde_eHT-^S+ z)4F|8XK|Zb=dQM{j)=Ag9b(>+j_169jvSt%-Gle5y{Gkj`~B7h?b)rt?Y6Dvw$_%0 zwmU6v+h(>LYIALw+oo=gYb$KFX*=Dl;w^6G@nW0*@NAo3@KjBgdEBNWyf00=c-NXX z^L93^<}GSk#>;40#2el;m*>=!%_BF>;mNskcy$PixDf9(L>P zo^h>BJv&2GWM z)6Z!q3`}fKA2{58V&HxIpMkD+)Sz9*=)q|nCk797{2hGXVICaph#g{gZWs#gyfrkt zvu@}@rxpKaXF9*Jb0uHdd5#~@RmGp(MG)-h@)um|$`F)v?GyBO-4_VEUI^^Ed4kYx zv@oQbEzImr5YFzNEzIs-E?m`pM7X>Aq3}@md*O-hpTfJ{Ey6F|3Sn_KQB>b;B^v7X z6=8bfM4X;1k#Em@QC!ak(bS#;qBT92L??P)h@STR5*7C}izGcNkyS5S9Mu~tUeG&H ze6n}F_-pS4@j&l;v0WcmJf#mKIoB5?sp^{{vFJZ4nbZGE@}?gxB@ZM?mku14mJKvZ z!v}+9w+9c&m_zNd6GLO=Wd0lZLB5TGAlR=sBTy@xh3k|rg#F4$(L7bXs86+7yg|*8 zpfq`s^P2V22yKY0O)Hcg(7l$2>31k(`Yh!qgQse@5v@)$^{d^@)fyD)leQUkQ}+&i zQhyn<*RUVE-nas{$TR^z+Z;uhf$||vN86C5VG!~xES|Cmhe1}tJWJ)ls|H0$Qg9iP2%{&TO`dW7Tu!u&b^2Sd`d2u>4_LZuQkp$N6mU zY4g@$lI=4{x4!4}&i;mTx5EV&3#a3*W1RQ7t#{ete#>>0N15AFPrdtmFF(&I-m|^N z_?+|(_x0qDG9U&9LazZzRr-fY_ zJ}kT-!fJT?2=#F7$c6}3uo(KT(^q+4n1$qwmXCSOgTHHDF(nX)6}7SjJX_~DZ|7Z5)@q1}y^On+SrPkh{9p_}10O@raF6y*9}yLmHZF2f>g-XgQr3?=kbGdorEzB>?u@-S z{Nb3(;ZH|j4SSY!IrMSj`H%+*XM(TA9~yQ%Zd=fQu`2@?#AF8~N2mJxMuquNBVB!m zMlyT~M`*pSM6`P@8~%SRodr}IdH45|u_&70?hY-bl%f@+wsm)R-MY)x+ikb*Teq!I zFI0h+QrwEWLm+69@x1r{ocC}pGnr&&GQ)7~H}n1ca19E2m3gS z%yEeF549KgS=$x)nAkq>Mr{^()mmA5CR*0H-?Mn(cGi4}>q;|gmto2ZCojcKM-$Un zhYDlPF2N|<_Lbpkn~es3)+6PHR{dokEvh94&7X-Em|YUtC>Ihrrk?nD69G2IxE3)m zdMf;6uwAf6K1Uxdi{UjAJMNaai&-qHr2U9osu6olo<~pV{y;Wqjf6`zzx3n#_VFRT zVVrvp$(VINr}@rhREye^tm;r{i`xIwWU5y5rL^_yO=;D3|7dyFmDIeo^J`O>I=NBM z@w4HpDz$!Z+t0e;t(mowmi(H`ro!s8jb&Af8rmy^>X{0p4zDPxF)M#r?N|19Rcz^^ z%9$k*73+(g%1;;JWzT*$mZTP@71tNMDH0dl`0bm2sBnDVhJx+63-TZ3Ovo$F9+E4{ z4$g_l3dr7?>7Ml>!!ENq-8#c9-8y~QFNA!;jGK?Dw%L zG2d4vpHDfF)S4XsZD?}k*E>myuhOJ(Up9X`^SSYB)u-8CgFjV#Is0+mm%hZ7&kGXw zd@f3``#d(`+o!Y-OFxbNp!`(uKJDYo_q#tfzYF=e>m8G5_U=dG)3>J*6|6ig!{(GW9d|je-y!4|(yx&Kc_z52a;&*=>8vpd;^!W0R8{*}k 
z&csK5dLRGSr^@)RpU{`&C*PO-KhJu(?(?aauRf=|BtO$Hhkoh*>e!ccuZq6BeC77F z=hddK174?o-SXP@+t=6Ize(N{f1C0qDCy3d3rRh1sHE7p3zDzC{gvGFHYjE6yPGL5 z-pRi!-|zgs^L@wnhWAT;jQvpkF;#qXZwunpI>IUei@z_|D_~z*w>9&rC$}{FB8x?hM9I&_oRWi07fU0X=a-Sq-sNvv`pVa}ey#9sJ6>7aHm&N4%C&k@ zduO#l$H$tqj$^g^)ide_cly@%c5?O4x^fy;cHeIF?Ah4V)Dzu&uh+a~F7(hXHOZ~{ zn#*l}YuBho=!UfGbrv1($nK7{RF>MEe%x6_|J8MYnbG|R>(?XZguUOn-@SYIr+vZD zy=&7?(f%v&(oGgpIth|SenzfRo6vQ%7Z%NwVOH!hyqygpzH+U^Y5t06zJ8>*zkn1Y z!uyh3;W+6ngpzJR@5zQ^n*? zbi2IUbcDebg}LD{Wusw-@|Dqfvwg-P=A%tY%`Hr~TQrzjTRvC3x7?mw17pq0Z5^x-yIQMjcK56Y+Ap)oviG!I=+I)Tad>EV)Nz@;os*}-OQ&Xs zXy*rxCC-bT*0?x03tfwy&$*s@&$n=3D3U-FJiUHb2(a%m1ifg}43-nL;DR1=<1geaI^o| zet!g(_oD-s_rE)!r~inc?SZX9OyIGB+Xr|B>jwM`-WarSkUEGRv~u9tkj8-?gXaZ* z99$GUHuR4{RiR%8tqAi8=?*&|vMpR6!iFy!d>|rgFcH}=^l;?gp%}D;c1Fd9bw%9_ zTR21$mOUgUJY?v(@LNMG!zIJqBGwIC6_GXUNrc<*riiV>Z6i~MkB?MD?~j}v{Veik zbbe%2bZ?|ZjA7L1826}MG2u}UW5z^f#!QJ)$IOV5#ZHTIjU68q96Ky3D%L-0c&vHU z&{!%mEVei@Fg8BYCH7#XVeI(G-Wc=9!kChX7cu7}4#bR#7$3ugTgE&LFO41@em=S* zY)tgYFeKU|?A7p3p|gjN3DplP9{lgHIfFxn)r4dZT@W&VXw4w?keP#a49O2hhl~h5 z7WH|cBr0Iw;mBJ-e1u`p#)xeLn!>9CCxs6S{2X?#zk3+g|3v75eqDn<2237o8t@@x zxxY=wXTR-(O#OZbFZB%we(!U9pv0#>Xoh!a&;zf(2XuSZ28MeE1s?G@(m&t5w4aT; zcfUn$I|H7$X8LPfto@^0miQfYj`z)X()-vujrCdKc*Fa(L%SDmAL=#6{;20QyF!mv zTPKeY+dth;+N8SGTidvWS+8(CZS}#W!BXiGYq`w%j>StSt-07~s`*041hWqgmSzSH zTa`=g%M{=2hA3?89-3~lH8jn(*=pi#Q)_(0dbDw))mx)sRt`q@EKeFrELnp!7HbUh z&8y{u%qPjOnWf1%WrS>j^0oA*!dKc)@j!Cj)JY;Xy(a$C#7taed`>jh*i7`*=pqql zWI;SMyoTEvI^vfM9%Cj3f!GQ8N0gAqpnu775mYu0IU?;5%B6>dXCziaGs$zob@2#+ ztGG%3LUd3cE^^VQ6G{AZVji!;1^hnzK4*-G+}XF2oY^;q9NcF|n)j)7n%-}^BDe$geeW#Y z)m|^%&R&gnMsJcfr1!X1**i(w)nl#A?y1%M*K=QUpl5|V0w0T#rK4FE$ZPr6+PcNle+hIuIUczwC*O=Kf7M5S9L8{ z8+SRX-*=XEOz%A1p;Hg-xT4m!ho~R5=XXqPU)ceo>K!jsm)obSeB1f9&#L=vf2c;c zRkroEu4%j4O2hqRM_Q{}6s`MOZnZeK__m}pziXb~9NEk@{cO70G`49_Q$b^4;HV8|KyL)z{W7sb5e>*Hzb^uA5csT31x_p>|x&=-OX!Z&`Hpnwqaw zeD$EJlhw~EZK_==|Es!O5mIGTky?4Ud{U*ptfpdn*}97EQoeje=|APwB~Im2OWu@a 
z7sr*w7MGSL6|E|bC}K(yf1fBB_}jW9zVKrQh0eKNg%5MG3jA_56uizhD+tg2n4g-pAb&!ZIKMjcRo?o{d3k82Vcw;T zuem-M8*-D=-EwE7m*({R`X}equecnaU;6B{v=7`RD5_I(2K- zt<>1z75q<_=@ zNxGMQEGa(yXi_pv%hFFJY0@tynP%Kg3e0$uG&v(9X=g@#(t`{p>34=Q8P5z%9-KKj zd41;galI^q5l=)c^DfhE>rKqz$r3}v2ryR?U{a%y(&-bvLs_&O`0)FUn z4*r;yTlM2}ZfL4U-qqAgd0eVEfAP;<`FTHk^M|FaDR`UKR^at(RpFgqU4{1P+kan6 zM~ZAS{w}(eVN>jx`Ly_T=8%%8tm2Zato5byvgKvk?8jwibK=VT=QfuY<{q!uo)=K* zl3!Yxoqw=uS3&=3-@=;e>cVq17k`hb9aluuNsE)~eirYoKUfmpFuas*z{(OE)5~@> zT__*gyrhC?38~CzkyV{(t*x5X_MzHSb*6?=t*y;yA6NG;)cNbwW(`9+=>|ntZDVU! zX4CiX_szF@Znx~}J>ELCZ)013%{-NXc5HjQHlib4=dXT5I&>bQlwFHxS@$po?{Q=W zy*$hHR&jLSSDw<`(bL)^0=;gX5T|A$GCCGDVg_Ie))BYl3mtm0^ zZi&AGu$2+2(Q9HQMvIo1xcU_d>5t9@o8BdoK1_ z?B(M-)w|6%(&vt!gYPtd-d7$_>Gv%l(SKdP^8pt9SM~eQe`x>dfo6faz{bF{1L6mG z25k*W2nriGVjwrLXyC`-1;LvKwFG;GtQu4vq8fB~@RAVU(6W%s(9wgJhrJ(+ggb^_ z4nGv?9nl#2AtE|#c;v0Hj7TzUT-3<$tf;f$BZmAA|1!iZB4FsGi0ecDiNJ?tM64Qy zL}m{g7#T2papd2_|A}lK{xLEvx*_s-v@|L&+B?cLCOT?D%*?1$F>9iJ#_W!wV~$3- z$DWEB7kegZBiK2Ze-V2uDlK+jR8{PzsP5QBQE1#BQOdZWD5p5^{{_pCWXz7A;LUk-i3BXFA04Utq9!`{dMrb=w*X@h8qliHvDDCvf-0M zY=-v^`ZetIpiRTP2H6h#7MwhEa`5t@Ed%94cMg0uL_ToLkgGwhQEowpqMi+~j|v*_ zI`U&+bmZ{B?1*3er$qeGzbrhv-`w!A{c6I}17?Q}4JZhG?H?QJ;{RdrSwFAA0>7&v zt9>OQMLt^wMflVN-}R0Q7JEM#xZ2BTV2mX+jtM>)4niM0hY{ZE?f>>lw#)T2wKMUY zXFI{;sm&R8opr9;Fl$4%Q&uBgOD*@gxLLk+-eA${^x53rNn}3Rah%ynhx5ws_C*TH z&QjrPH^+3Y?Ol_zHtohgt%HqOtKCNZtdb2ESc(kKS^Qz}-Tb^CL5O-ktE?*}RYxnv zRcgyGRV*s6E6*tlC=V~&TlS#S|*^Rp!B##z^XC1v{m+L)P~=8-ugttvzF z^J>PqpEEOpe%fc0rq-qJPkor~o4Pi=uBKHdpG^Cdd?4+5@~*VK$=lNwC2vg|1vUWY z9g_E?86_V{6C|HY>rJ|y)|eEZR-E)TEj=ka?ORfHT0&A!+PfstulGp~zdj`m{`Ea+ z;;-zaRlh2dj{fRO`tO${x!{+3vLHP!IWT=?^3wFv$=B20C;v{bPd3dkO_`W6Jmq4> zmXxZDXDObUO(~l*oxdk%F8FSmb?5t%torYXSsp)3v)BGunVt9}Ia``)n=?0cTh6=G z(j23ok-4jWKFIy`(>Tv3ZCBo@w6?t7wAuMHf92+<{E90GNdH=JGd-lxB;!@#p$xy@ zY{t{yn=}22x-wrDtkh%FZ?fln-buEH7-_QL&}Tq0+MXN9D)nHC6LkEUWR>wCZQA+iNDb`P8y)Rkin2 zXX?hb$JI0KeEokNZyIK(S2T({?V8?o<}@wp+TU!{J+S3tcWcYio*S*kz2nn+T~t5SjqRL5in@66Th~Ksd-pgxut!U`_uOC}_r|cx``TG| 
z&3Ud)6V6}NHu4j6r}PqXkRXMuf%~=(3xjDNq=U{u?l7CssjLMi=Mu4GZYI8sr|`pw z)7g8O=L+wWp3l8scy@Z{dj>$k03*p>k9~r@8hs{uIry&jn&x}h>!j~(uO#0@uRh;A zFL%F2uPJ^yul;^v@5g@D-r0UW-hF;y-d6r2y(9f6c`xvv>Al~7p7+203%nEk7kL+e zslWsQ^Sw<2W_h~=O!n>{FxopZV5s+qfPvl<13bKE2AF#<48Xis26TIE2&nSf5|HV& zH6Q`(f!F4MGhXWhc6hA{Sm3odV5HaFfB>&)0Sd3N0UFQffFjR90q;CL11@@61Z?%h z1EzR(`UiRz`^!B)`qz8>>;J)HpZ{@>`TjFKBK^HQto^m_TE7(cJipWKPyJ@OANKQb zpY7M<*3a*g8|ru1?YHkAZV!D;+_w1^y2koma5eRv?ponv?fTHC!exWcHJ31-X)d^r zsY{l3mh*Y`ruo!~s(dWJJ)MLJ!#y5Ka#YJgLnWuD`H%k_>9md1{sEuK5fwixX|n>X5D zG2df9$lSud(CoS0pJuUkhGrGEk70~`xKdzSq4>vUv%=oSK=IuAzG;+oglUddfyrE} z7xL zldRcvt8AUASf(+#BHdx)E9H&9N)8xLkPyZ-;$uc@#Bw7>blUK^2*|afD+bRA2ZJHR zJ$WwfDW8u&k#%GKvV&N>)DjyYeTu%8M52L`T;!#AKGILzDSRQ?BlL&6Yn~A|1m1+7 z0H}(3H+-!A4pzn6VQcw|sGc)K|KW}!j+_X2&K?nlv4rp!b5Jmy5eVAo-TIw0!^`QN z{56W@ys16hXHw5jAP=xDI-K3BJIz>)4H}r)auxByQQ!>yybXvQS*@I4b7S+dGq6@hfQ;u zqMH;=b&bCo_rs4RlS5>#OTq>aN$HuA5mOT4!C~R9jwm zzV>EabnW80z8asp`!#*FQ)|A|N@|YRey*NUyRq7_*1NjBrmpHk&9$n-HB+mm)+nmH zYVs@j>hqPqtEW{ytG25=T-{i)p!!8cWcAJp`|1%DNVT#8LPL2$)${UCReQ?sRgEt{ zS>;l`sY+8euj)tH=&FllAyq5O+^U9@nO0eqah1KLEtOfNg_Vy=zgHe9eOtM*^ik!+ z((9GsrDrNVVEkNJ`d1}hdZ3ajIat{Z%{R+^sAvd0AOhk_=WQo(D8dW`~ zbYAtA(*4!vO7B;{EX}OWE7er@lseX!myN9nFWXbIsO)*o$+DW7w`HcaHD$52^737^ zq2;e@SC)6wUMcse`&GWEjw-)d=UY)xH?P8~{(Qx>`izQ8^+;t&eQ2e1!=}nv4KFIM z!{|$8gIAS9{$*QZJ+2zmFjIB3;f1Qa0dEgz zoY8)`@p*e?Bia$zG^68aQ+!8dlSn8=C|thW?ARRmW7>Od0S=oAk~1L3#y|%y{gKdiR}Y=U$r0aRd&?%0(YUWqT_PkP<3D5 zJ@q(^xbul-b*D(1+qqafuq#P>rOQFbbnVj3>MqtLcK0JKdybNudMe4Bo#lPnNi}zu z9Kti?S$+~#$3La|=_T|o{W3aL|D84w*fVnlTbL(;Y=#neva!Oy*t5cFwpuut^G42a zTaXs+D>9TP(5w6(Xdiz89i^{CZ|L2z4*gmTh+Eic0f{vTLh(@H4*VZs0^TGf@xjO- z;smmlC`S?rZwDpE?9isGb?MBAk*(Gws|mrFN@WwKY|FjCO1uHnPi%5Goef_nz)$0 zG8t`}WwODv+2j&D^_^sDZQ5ZPWNM=rXBwwiWx7Fe*z~gEmgyJ8d(#F*7H|OSOnsC( z(D;`rmMiTPhn0Sc`^s>|cjYKWy>hxjV763YZMIPnXtq}|!t8`%uGuBUX0yAB6K2m8 zx6R%wKA0sbvdq#Ib!NX6TC*yJ+`LKQV&0(`Y~HIFXU-@Vnd8dM=Elmy=GMw9=1$6I z=AO#$=Kjhu^I&C{d6W{fh*O$dj8%GBOjU+i%u$ZDSfre9u~PY`#d_rdi!I83EOsex 
zSp21YVsTvg-r}?}#p0YY)8djc&*G}G*y4t=3ar`Uy0Y8ivXZeluf#0RD5aLCl}47Q zl%|#^l@^vKls1;fl#Z5%lx~*$l%AH`mEM+rD!nXM!TeHKwovJ8IR|XI(#di>Se(+q za){E-G6e3<41o3BlxCJzN`QL0c|WGb31zA36K z-Y5zz9x75Tt}5PIoK)Pi*rhmQu|l!iVv=IHMYv+3g^ME0LZWcD=rmPW{5C}_KAQHJ z-!`o?KW>_5zS;DP`FztS=A%q6ng^NgF}E{aY)+WQnRlA_m=~KEn}0FUn%y<2FgtAW z)og{yZL={Z2h0LY7Mq!v#F_ON`0cMgX|80C*{=j&>e5-M!Jl0q)R~nbgYK@-AUKs6> z?J^oK8)4)uQyBG1s|=H*j}1>rHyBQr1{=CaNdvVc!5~2bPkl=w48}-=1`5e{d5QS2 z{H8cozD$hE1H|8DI?+MdXHm3lzep$>F8V0NMH{6*2_NY(qERx6xFwMgvm~jwnPfkn zBaXn2i)k!c{2rd^-h?HH0=ZhhRnR8}z^Abb1k~puNZ(s#AA@dJQDAtva5J(0w3r z-3BsS>q}nNw&><-AL%@`D|DS2KivmSxAvgst#*QDyVgk)uI=d)XupH{%G@Q_H6D+?uqL;-DBG` ztEaEqy(hc7r~7gD*X~2z|8&pqUfdnlJ-FMv+qhfX)znqo^$~tfb=~S(*mbBYxNCWr ztZQ^vO{ZVi`%dGoQ=OX5rJWU>5uGWWrk($Fwx~~cepYYlyr7=mxlSF~IZEx?>8_T8 zuC-R(*iobY*^#M!+>xmMyW_EX1B^sW?l`6n>DZyR?O3fwI_9Zc+b62?+T+v-?GftR z?E}Zn$=cVw#CIzFmeI-aQ-I_|1! zIKsgOsV9K`%XHOy^#WCj zdX*|qy+zfa-lw8K55}nTips6?u`0CllWKftj%rzFooat)uj)o8)}Gj@Y%lI~0sWUi z(0>^QT%d{V!@HKW&*<6?8ZdviAMLu`ey8hg`=_pq_M)yjc$$)J7k68AICY0~M0U^Y znA^R(V}JLpjtAYz9hu$r9lhNI=)icXqkG1x*Yy0UzSwgKG++|cEj{IG<6c%hsMn@* zR`1}>J-yR9Z}o2OOzyqV+1UHOQ`}eH>C-3Z8r$dAwXtth*X6#oU0?dnch&WM>=J3} zyL>dV?g^T}?(LfC-M8TG)l|)+Zk6VDx3L!O3D)-OnWvr8b3%KpCqetBr$t-cqtwZJ zqjX`tD|D-S&*-lAe$-|4w&<8%bJD9XhMd;7kv!CQlYG^ePFD5xkz$P_)lV~qnxWZ3 z9n{>UUTJ<%HJUcib1|aBwFBr?+8Ok9?EyMh`;^9YC3KjMVE)t%WFF}jG1aeKO3r0<W?xBF-e}j%T zCfvvm6h7vc3TydGLTmjG;Us;J@TA@WN!E`=wEEqMr{F0vQ&57O5FqF$fj8PAn26d6 zx1*zlH$e;L2l`mp0u(YCCPnQ}J!_K=Etw z3~{P>x42AvQ`{x~2K0&=VDt$jArc44cuAyWsbr31hh&%Jl;oP^wj@#VR#Gha1(f<8RK&;P@u8=lM_ev4jIjOzuo-|bURytLdDqSxt0XDr# zdI$8{J_2*TP-Z7n%e-YG`5>9Se7J0Ye1dF*e4cE!e3c9$4sb8_%g)PB$nMB50RQ5a z?4$fY*$??^S&lqGRwDl-tCoM3HG;K*b%N<&nb06XCI_u*3xiiOCxdv%)LzN@8@!PX z23_qT@LrrjBIr_oluZIn>*-)~V15BCUjpwhhxL|$2KQpnxtL~Y-8p^FeFPs3~@E%EltWxq-mMVd$3hbdvz>L@< zSt45^86*2cGC&qAv60zJP#G?1ld8md(me48={xZa>2>h|=>hRF=?d`}=>%~gXt`NP z?Zk`(5m!l?MBgMCqT7-eqQ4|(MT;dHMRAgeA}>jQkqj6UDseZFDb6FFiC+_E#OH}E 
z;w{81;O|9=BMC>b8zB(Oi7HVq{z+7VUlJwa8$?&}QKD_QlV~!oB?9ntLW18U>aop4 zA~u>hgE@mvHHk-I*|;STxq8s8cqZsm-$R}89Vm^BNAoZb^dZJ0JFy~U9QFcn!Hyt0 zbRLq91|v6sPPra!5DrHZg=XkkVKcG{IDIj~z4u&^h02${J|BMm%dTFE@8yXZ4?Hob{{N>8It(&6+v+LfM2%jsZR zL)p+Z6b~wHHB=(?je1Dk0=m*(Y7aG^S^+B5lPGs8l9E&2R4-{xm4m|Dce0y&PF9nb z$V_q{nLsWlACcqW)gbaH=|FBLCFDv{r<+MO=|+>qx=8Y;ZUC@WeaSmIH}bU3f!wXL zB-iMS$oV>)oUCJXBX!-nA-XnQh^|Q&sH@lcfqCod!0L6rx&~cv)a@YW z=#G#pbQj32y1V2NT|9Y7_YG9)^5Oe6kw0_{S)wzh+I21zLJow+*a*s>oJS2Kw^38c zbJR-mIkg9Lna`8e)Du!irI03c1?fZgk#V#EwU~CN_JSJTHF^&9j^0V-(^siZI*~G9 z$|zrkrN%J!^lB!GKE=!jZRWq|0_GvDW3p)*mZT$r(YlZw4?ODKOg#KG%h`O;WoDTE zoEtlf8^<2vwy`gO$y>&yvKX%dCXoqH9z(dTpj3X9KL+aMFS$CtloRU(yuaR^pR6Ct zZ`W_BD_UX3@{?q>}DAIoypn_I`Kd^_V34%bmcDC@I z;Gi&5@Sjj4$PqdSyMcDDK>h?4^cB$L{3_fBtk(O8G4d0MK$?M-E<&#$Ug&pZ9NLC# z0e#LJXds%5PDks}y(oe`K%IcvJ`|YE3xLPG5A(yW1JV9HwhA;lk6~@t0}R8zV|I8Q z5a$tmH132i1TExkpnrS{G>`A%XYlu+kemtn#`Sm=PT^YI7?gwE2rtkmjt08xJWvqc zMJyvO6WfW`#NR|FC=WLhZwU@KNJ>!|;VWt*qD3?@OC$qAgRSVe$WL@j6e)Tm8YlW8 znkUK=tphUBZc!cRezuBE1BLIbh!UL>q2e2Nh^

D8%Gp5B};^jcK_=;@LSn4mS&W{FU%m{enzH zZ-S!sUc>;Mi>Q!DBn`1c?jsCb4W;1!-UHG1f=~`x{Vl=?!XLr_Ac`9Z{}QUe52g#E zgik>A^=|C+CZSUj2zn<8E^IxdYsOZV5LB)Qm$p zKhA zP$OQ<++{|Bvamn1hcN{Of0CZY)Y3zlEZUp-NSiVL(I|77?gJ(N7VxGO^kO=PoJo+Uyhkj1YrJqpq=|`~a9<>D6*h}f#)H3=e*bQnWeGS&TO8p7bt*~r2{eb!l z);UhUqt4P_sO$6(>Jgnqy{3OtUtnK9VV^m)nku3Ds5+XaI%pIqug0L@=|medL9`7M zOS>~u=pbex9S!Q9Q<7a|;z``9XTqtYDO#yY$^`LG1H@lU4 z#$EtjfHqw^>M&lTPCOg4mGcTC~y*_3StBef|UXk zbcbDpZv@f8Qo%AIFE}Q25k3{h3A2PNg&o3^LIdF4`y!dbQAnF`1+eZ9BVNcIWF+zl zS%wsW{%|L96A`0{h%;J*grU92B-9v~@cuxEAA_ESxyR@sGzqr{euUXWkuu2H)}~_?L6wt&iZ{*bclJTZLzX7W)@$ z6#g6w1=6WEeiF09cYtSKhIL|-urfFgKQT8rYB~7(9`piMjvl~L(KXmhbP7=JBCsQ< z3$_lGU{gVjKLX7|ozOQZj$TBYLAC!EG86p|2}e&L4(J-tXCH?&B7xw4l*oUGR(KLA z5w1ZJfv9&~*bmtwGyyU|w{WO1SLiH!CPaisg>{0(!W6*}h-$XNg91vhKu{!z6vPW` z1jj*>eGx=J=;P`w1!wdc{c3%lK2HA(l&z2I1>lFO;fnjhTj{U!T7ElUz)$60@;-lL=%fd- z<5&eyLo}c|{u|WH-!WgAOUyN9J7|*6VkSb~=EpcP1mta1bS?c3h-3fIw}8jImyV<7 zLADk}>nTUNghIhrGy!|;7qyD|kNN}V{He9T!JI^Okb|jo(usORN~oh`C-9nolViw_ zq(AVRjmZ;aw{A08sGCQA(2W3oeLwP$&I1Kn30sePeq)ZPSU z$Z6dt?E&3A?N;3h?P}eh+WESf+DW8>Dm6y6Q}|<~md>)%9xh+6IkATc}ZK ze`@NqA2lV~cultUfhJXZUGr6YR`XGNRFk0Hr+Kg4s(A-2kvH0;BbD)WvHS0-s}}E?;v*R}VbNK25w1*M8TTYfE+RTD5MV79(ST z(=kmOK(5jbC--Zok(acq$tS?-_^Q217HD6Qt=e=F(bbXmI+_gEDXCeye$-CgSn7su z9hI#6hicKiqKwI6Dum>yd88|SnEZo|C$~a3?KTb1>e9iKj$TB$FsG>T%zJ7tQ$syv zByrv=4iRp2Q~6yVxH30r=Jeb}}R64l)7UJ7yl&%ADft+0Wc$ww*i1+VEf4 zF?<)hn|J1(@sqep{vapSzvTw&o492_yStzt1>B9je3$+$ZzoUzlhQ%IQ!ojr;fM6) z;2*`pE`5;DT`*5LTW}KmVj}Q0+JxN#Ys6VN5}7RAf*cXvK|TqykRG89@kCtE`N(+m zGO`!VM_!;Z$P9*}hS(-F61xXv*aGx2hGQvM2-c3R#+1Okh`{r(<+vU@556o3SQaY$ zD0rUtcqq|`&m&BTgG4y-2r{+|;vCUIe1q&pP4t7zY=S5bGPL!Oe_a;66nz!t06VJ# zaxkel5cpX$#4+N%;zibHJ1LAT1bne_R6W=(rD-DrI3(TUof&8Kz#c5MW>iNke4)q){?IX^hNQI!fjv9V_#e zPLz2{C(B%+e_#)0DV-uS1(U-v0!#?+Gm^0~jbx;(Q!-rEDhZR-NP=X=5?@)S#7*`M zYUei+L)jww^D#-Wbc-ZKI#2Rk8Y{UX^_Bc3HI=NDlHwWCDsimzhj@VWnb=W! 
zL2Mx1BPJ!w#PyPK;!H`1_>II#d`%(~ACPE7D z_zzLMc!21<*bF)>8e+D%5Nhf-goF43!HKpKWuoasf@m;tS!7A<5Rv#SsIS9BAE1MC z73%6;cpfnae@aB*M+gUeA)&{@i7L#T_=I)gSFj9x3-$<~g#85^;d!_b#Dfl0iD#f{ z$T8EfqriV!itfZ>(8-t!>Wc|c468&c&;;ZydI33wZa|iTe~(1{5OY+Hs1cPg4M~Tq z@sV&3T$R(2rI2UFLbUc2whG0;`0pVk)T8;|_2-g6Ac)Xwn^2@gZh2Vss zS3h5n3pMvMy-aXIU#DNLPt?Z%W5Wfg8$$5(HM}$UcaFc#m-0LL_xue0A|J|c=56^& zynye|HvvZ@6Brt=xpeL-_ZTwFqd?7A1|B|!3*wwPYoKQ6A;)ZBU$MW~Q^3er&7NmR zvpc}Y&u68OX|@0r>IY-O-evlj157bg=O39N%q{TpN5RW)05a4}$ghSm&mgxvM=O{e zK$=`cx6|VwuZ*O#Xn*=EZBM_X4e9$dM_r{=)EV&qN9a_@{oX(xco*`*v(#*QA2pF) zLye+mP{Zh0YA`*J@}u1#R#;Fb;D0b$OHx!l*+u1&t<)E?mWn4!sJmn?b%ji&PLp4# zL*zSZCmBzzC!bPF$Vb#H@*XvbyhV*8uTf*ki!eP0ug`+A`*~_Ed6`;F{!6VQA5a^~ z7u0q#f!a%cr~ZQN9U&{I<76{zvj=Kqj=DyQ>06|dzDGLIk4S&|2^m7aB!|$6WGwxI z97X4mpr6Z^mdK6VaPo?VV1ymosio%$! zlnHYPm`CR+Pv$-~5PWJ3lR-^oYN_SG_1(^x(kB^T`W6#MgH|Wh*?VaBuUy5%~)hqL)!S^cgw` z^4aldF}fTwvID3HyN23h@6aGD7mdSOfsG`@R$vy`cFYH=_(<$3HW7P@EyF%zd$DZn z3b2vhKs?9>21gTSfOD85Zh{ZM-SAjE6e7X|d8?lht5(Ec%D+1gi9B4&iAPvcYEM!EC2Aa_n!irb`Y@;=V3vfL= zi37kgItE;#Q@|@a3p}C=5N)o4-GccCU{AoF5|J>Cf++NV_J7eRn0O3)qDRnIcmUhF zO;`}u2t(o`?C%`G;{O0i=@g;DPeKGc0iEolL(fy^`ma_kt$t^4CSxDEaZ$AICa#?InJ*dEBA zSKtqTMtKSwhHt|B@mY{V566VKAEv@Au_B;OrC^;vpeh9)nvDK~JwS5D z;IFa>$W>!eFKjSsj=4kU#R65KQnUbS_|K>cc?gxuY4j_y6@84%M$aNqz&Ub47a%Aa z3%nz5;2jwv@euJZAw|O7kWVi{Zb3G12$)B!g*M0}A&!I!yWq+y5z2&Lpfhw&SPfNF zhG4z$J#gRe2!=v^?e#=NAHHCKgzIKKdlyME@V;-e>s=@XsmyV*WpV z9Dj}<%MAyutzc7ub@ZA2#J*vlvQL0?bc;O!K7BiT zgk1`LeGj&(wTM4 zH)av!`qP-_%oye&GYtHH2y`3#nX`;5(2Q)DL(pf~%SfRPK!H%i(d!uv^a(ql7H^=J zFy-_@CZC=QUEG;WGChU)M2~0Q(qotx^avmm$1=C+80HGp?`I&69HXO{19UjE3%b7B zpswFU2QcetA7&Np$t;0de*x{r%%NSGnJ}FOHi`CN#?oHQNQghtFdYJ~!(sVg+68h` zXC{z#V*1gJVD`Y0vxV2zU{=h4|4nURnH{|6zy!c&17N*=u#Nt-A58nh>mbIT4h9

K>gwI66`%%!B7|I00F$L43m_cx?A<&lyg=3GP zXG8CF4l^9iMJzoZeEUMMrC`h9^(wH{Obkq;;k*rn;IOGV%*?!?r_dMU>#3-EKJA4vWf87 z$&4fXP5#eZ=t;1A0vyX&IOad#ILE>;l9K#&=PV?X#FM#7%1m}4PEME%W zYbkt>MX;}Vu)UdJQ(=1(89UgPHJHW!Y#d_++q8r4>PW{y|0V`L7X$0W!S@*r$2As? zaUyJQ3Y@Q*U~}QzErerV4EtXS-(&gzED+935Lhr+2v{h+k_m%z7D=y#^SYiHMQ?;_ zVk0SX544p&2iMaLh@tnHee^T9zTPkg=|s5JzA&fhWaa|>gSkehGq>nG z<^j|+Pw8s7J{y@YbQ_aOcQKjJ|HH(v#Xd>>}KW~FuNWDuj@T~n)$(AW%AgE zOa=R%X=KwNc9$@E;EWk_8pfFuu|b>-aJBle6S!D*KJX0Jb1T^c+z#MqonW7E|FYk> zS8P6)#@2IHY#-Oh5)i+wcpJ{2_v420k1C(NTPyegQDEw)5-tXZR!f$Iz=v;@|5l_-s7|WFBka zWku+ff_dNx_kc&dqF*U^tv@Enfu2r895~kMV<(!AgRK2NG;G(S;(Btkpv_F$wo#X zb;v?QgX~5m=vBlXGVUO>1Q~-;K)JhmSpLPWDgj-nkvQobyjBYG#= zCMp!25%mC5*+~3Vi-(IdfvemiUIU!2gJL_$RdKN7 zrFg6)O}t!EEj}pe6W^AIB~W2WDkc6BQZiIxBAq7jkgk?QN)JdTOD{@RNFPgfOA{q$ zr5Tb3(n`rks1I|cdPyVHhLqGnY6P_&kdmcgvXRmWKs;G4TO!>j+bq2#J0yK3yChAL zJ&_j5zDQeSxl&$MBQ=w&rM_}jIz%p)O_1Bl=E{9#E9IfUSB{e%kWZAImd}=5lP{G$ zl&_V&k#7dZ@=jT<{D7=RehkKk&&oLYRhh)#u1sn06j;P>Wgam8>u-=E8({EL7Hp6X zGl1tj0rojkRw^F?!h3H&A{v` zkj{~Rk&c%?0;cgLX^{Mw)J?uyY9ZesmB<&v`0x~Ihb&rJB^w~kk-11eL+$$?)V$|p zYTy}{Nao6tB$05ZqMhs?2_xMqDU;5WBuc|1*QHjHeNvrxvGliig!HXAKzd1RCEYF- zOXomMJ51ap@fPPwEX406BB*I~qP>y^(Gp3PXpH2QsGsD5$V9S3q!!N;rHf-l55(T0 z17ewIu2@A35~mS{;s->d=n(N)w1~JWiXyf{Ejv@xjfaXdaC6ZkybJQOG-4Tki-^T{ z5H9#Mg2#f063m2njRET$OUGAX_wf0E|VXSOrppenQ>>L-HiL4OxUvhEB0RVu}(-D^e}|hI|lSK+XwQBWr}w$Oy=( zoZ&y06=VvF1rMMzy@CP-b~Dk;48{$FltnNW&Oy6EjsUASO3b|EG<2DaGU-HP3c-G$iQo!AOkV1V7- zU9XLS$b~yIXTIOP@B9B^t#7dowuW5rWr!)c!1+6YmeP>baY&Bv~ol{8aW~zwH${Wl^q9& zC+~L@ckFZc(egX?I&#u7(d>LSllD6jk`6e&B<*v&P1@~vk+j9}Bx#-FQPK*>!=$CW zE_FQS{nMn?j^{}m950i$vfVDn2lo4%6y^AubjtA!obfyB0mmoZb^J}QUI4-B#6|PBxMijgv6QV>rYBf!YAfE%XeF9H zmSN_#Y^=ALj<|hxQPCrR8%bq9H!CoV2`_1H>%ZD$H-i*scVt)|Rw{RmPH~0N5;5 zEIX^Lr;_Se>5xttq{dbzsRgYat&5dW>c(qVaE;DHSvt}>x!-5xa~i3Gh|HhUF z`>Iksj@y9qHMV{b*ZV|!XEn86(_UH4X>Dko`P`FLbNg8ztU;_lI@tPd4YYppIl&6H z5{cU+TD@pJxVJ9+){c91?c30LY}K)DTeYmKR!!?1Ee6l?sN4IjX&tnJSb?{mwa;o~ 
z?dAM?+^r$6b@{F~zf~tPRGHTb{9ex5ZI!mR(+?Zzk5%-|Vk^HDW@WQxS*fk5poPgLRgl&6&tD8%~Rxvj$@t91}> ze4j{-NAAU|C!am72;LvZ_KiXNoE90aQ*3tfSU#Vjodu7L6K&CK?Zp+*LEPl^ z4&OZxjbR7WVBaz{Uq0s)S4C#Q)rbSaF1E2U@;YI%Ub$f|7M3}W&!M!re78^}VbA|I zSBjtJD)F87U-<5W8HP_OSg4vyU z-9??w|!eZ(Q!9^P*^yNZowN3q6iBbJ*j#UisQYx6b~(}^5TFl&lYW>x%( zN@9>%o>p26G>a1h_Y=WpLD9=BAiC4Kn)yXHS`S)pKKC^XA!~|?!KOd^mKMXza=Zqx ze+Blf#J+)SSB3qn@>(4XvO4=z=l5!?4_sZ0$FG@T))VuHwJ$e2uzq_lu5+;1Ylesj z?)N13f1bX$38(0(xkdbAhO@HtVc|4muqZCmulI$Icr6NwudG0wB&rF8l}6KAoq6Vi z8EaES0M9pw=iP?*Q$OpHn8q2ggjEDi zQblQzRgaai+kx2kkz!dx_z}oaJnIN6(k3aZ6d@ItE=#ppS-Ue9(r8JQ=Sx2FW~r1M zDK#Jx(35EIc-fK`$+_e$a%DM6Zo?|w!{pcUd^wr5g)=A@RPrh- zm2%1f{G&Lft8!l%rF>Arlq6+~;*F1#M}44_Q-3NARJ+=hsPss+3M+=UgX1)s2=y}c zx_Vgspx#tX^|P8yQ?)W$9<8xfS?i;@SXN}HiLK6 z9S+iP&zGLliL5R4G(0yGU)$%I!+y-Os6EcJB8=O5_9ve0?C(7L+P`^**b}j-P0z)4 zyM2Q_vwfdEA5pjBtce_8e{Qc~|7ve&Pqw#(iQB`T)oXygkk?4!aTDy-y{6mid(E{s z_gZXk$GXW~yf)fpI z_FGPyZ|~(bgErRQ-D{}5Bj;-A6>M+d)yrPPtBbv&S6h22ug09erahNed3z?WqV`l? zx$y@x*i-B_`wx4H=NtQX&%5?lo^kfuo=5N{x7rVRF12s=oC2R~kbN@uJis%^-rBRA zy_%<=-4DK3CQrLv_Du3j@Oba}-s6tvJ&&`Vu^tCK_j_#cT;~z$8RjwEbG%1K&wlXB zS`xtw^vLB|fal=t@g2PQp2rK@36Bf5P4F#dfddZ!18(Ng(^kQwzAe8;37%~x+i#HZ z2exn8QQK2(jqN-baJbgbHeYLI8={rAwPd8YqN=%B&tBFJYj4$6TC6%=+Y0(V8zg?9 znn7#M>epq}J8CBNAgq*m>O;Kw<4R3D{Cw&pu-z{B9#xc^N={`zXuv#1{s841ESDW} zDP<0`GtYbge%7wm`tx{MW8Lg`#Q5+Nx#Y*C9lf+su z+)1K|=ph0`U6EH5f#H)%{59jv*YI4gvaCN{7e;2 z*EhptW%aKhw2zEk#yMjNF}LxsV0(ejHa5nv?s{J%uhEX!TRkI5FK@il3mCVEzeVdZ zI8g$)><4h!NBRU-pC6*1gde+KZ_bMI)nQ?j)K}f>k07517Moloh9^jV5H07P|b%EHp`hopX^Mdhd90UAx=A3s}1{B1!I-;mQK^z z1m0Cc=U+Ht32-@n(tgn5opotJysyo7weh5a_&vy(#CyY8)2V>V*}(}@)6#&`r2)@O z3$B+Q{4OK!GlL^$1@H6$zs;jJ1%GV?_Sy;DH5iGn zHje9`29Ir?e#E&FJbANz)wx%H=sc>wamI4bH}pTwr^F{e(iaJO0o~Hefo<1@VcbG5 z2o_h;7^(+?!!-bdYj14S`x^W75HPq|#s%#12l`Iqy&h#G=;saDxNoF4-oXP)Fv=OK z*~rLb_A&~B!3CI0j7H`zaJgvg^1H@7ywMG&VH_~inz3d+^8wHOGjY}wv!m&azRM{l zgUc;Me{Dpc9x{W`Q=`#Qv(Vow&`rBU4silI;3_u3Gtpao6XW2G&BJC{hi$zVJ1Isq 
z!M^E&tuqiCXq;t$&85M1%k8$~4r2$N!cM$ub%%{O0=se+_U>BCi-?5}EXBgobE}f{ z(`txk-UVGWOe!i(mjam;jis$pC+UzhKsqH&z-yl`J(e~||G;znBb_E*cT-9)zm#&z z@lr8#Qw2Gr93=b7Ex_)&$@SzRa$9+#94v>*qvfUY40%2CaF@JBj(|gXhIx5KJ}uuR z*8L2&+CT7`e#`e{r~Fu!lvlE>eB$+&Y@yv0#gaV~o01B>mQKk8Kgx$_UoIt|l3(#t z3d7_mO{)U4t*%1OnNmh+sZ=5kSOdLRN9m)~R|dj(8^SL`l~(Y2+w-}z(x28>>7WEF zZP}(3ug&?~l-5}3jn?eW(K;!$`L%}9TB*tw6=`LZ7D{oBKhds&=4dm!IgBXRAXA1Hn`Cq~zZ>n$ zcSdtE2^xTT*D_BUmB}b5Wo|JFm@ADeaEt8bMC9urXFmIMvUIyILR@N>cfm9`Uv9yuLtn`5Axk% zUL*A3w2|yTnt0n7_8DiK)h8L}^l5M{W^wK?Fn}erRb0zPu4$|BNZ)}j*k$}6c9256 zUolP^sf`PU5BFBccxIF_J{eWWst7U^vx$-3>;Pvq*eGU>LQYLLYMYC}5H`|>`;39~ z+gLdGv&?(OQuC#;$^2sMHUApNOq&^JW-)J~sh*h?%ui+=^DmJ{4I4Z&*&M!N0e1dY zZ2u_a*G1&lef)}d_#}VK4Ls`uB0D3a1ino*@e&RBLv%rY4Haqd`Eyz;L{V!ue%VQ~ zMXriY*wlmZ2`5<&G2ik)N@c?zEsQT)(Yk;S`@m`oFE$u|cnm)JTq`{pD0$tH*Ndpu69>DsYBG>>O^(08m5M*E7j@hW_7U|PHZDW z-KCyU4`HhxQ=h}q{iI%16VwM-?yppD?K3g+1Tqy2I7yzwIx=dtvAddSMTmTq(R#sd z>kG?afEEPDt*$mgt4}nf0TzBEZ3Txt;u$11|Om!pXh@65o~4Ldb~tx0T?S#W_Y%j$4Fd7S$$e#n`_X$N9HS=DV@9;ru>G z^V5R00$L9(m)23ssx{Zrb7Z?#QM0&*Kk)cJsXp2hH67U(9@<&%GYV$%KJ^PM<=5(3 z^(p;wAJ+11dh4n>N1_|SuIf|*@s3KWAK(%^1M9w{*wl+kiW05FD~FV~JjVyhdgYR`Scy?)C=p5soZbG) zMs!LTI%EhLB+b!BW2U2u3?}u*O+UtYKhI{m>0v zEq`=F0T8Jyc)>O+(E1DB@`XA6TI2=G$%LM;6EhU#J^aP8`7YLo_hJEd%}j9{Cd5TC z8jNEwzxE+NqAT`wdvr)kv0F3(`L7A)5rDm2Tr2?-nJ4nGR~|6|`)CB%$Uu=A=7fzs z710Y!q$gNNH~jlfSmEtOyxC5?H`|IQ#6E9<5nMq3oC2#jZdOEYl_tli7`9hou^${_ z517O*GdKLE9LO~vvevS>+rRI9&{aNS1K({hvtqMl;j^n{19Qj$CX!36WZ&hqrL-kx z9`KQT%U>`ByGvvi%NA^I!_TYYdA(Q)a??b_W zLSX+*qCaLJpXZ8XbCD3(nI2+2tdngbiwLJb4~pD8gZ$!@CS5L|E+HpOJ*zRP}H3Rb(GnDS2W$AfMi;Ex?!27I%Ebp_1w zCV1+7M%z0VW~K*nT+B2V7SW}&l}MbyBXVutlTi-3*y}slcKG% zQk+#;x{ls>Xw{NlT6Kv^HD>&`!3*pn{RXFTS_33a8jL;}E@faYWtJwv{FyH0lIBQ$ zQW!I5q1$_`%baV*+-nCbsH=2L>LbOVbIwR3q*!U3bWxhd91WH3vh^dZ>X*_gayQmW z-=vLF0v2{MmYOc@CoUc#3F!oO^=ZjNJ}0G;uV7!_B5L+P$}T^b^2l$=_4q6mmVaVz zCz6HffEntLs>n&Ol;9?j|4|1PQ%x+xs&Y}JM>)9x9&meFU%9+If)>L25V;Z-c_6Kd zJXWqIk7JLq*pwl16TWN3YdhpodwCib=xk)sTp13k+ykVk4;dx>X#KI|`y;XXx!V$6 
zm!T_{afam_X*qIig**kjc8a_K{ka8=x{V`kL(*+=OS`#9yxC~fnY03p6>o9vc^ zo4KOR9A_hK9app(i+F`R03V_c=k3k)^kdIKY(0$Y9M0B5X@h9}*rN|tb1!+6+#Sog z6Ti0N*GBwWi(dn|ivaGbjNBZ_S|7Pu3+7iBSsgC5uFPc<}+#Wy4VdVoC%q_hGJA6WX;TGff zBAH~Th)+a;`|rnE-_3a6X+5;ITDO=XSD7i78SfVv`*FkzP7*^nfLz{6jA12Gcs`i^ z49gEWoeKmb3(bql$6!>l2$gZEwq|GkLJi$zAC zME*sQJ$w*Zy$4yn6W?JIGIKRP^b+K6820E)GXt_UH8`|~2r(tFVI6NF#T?4(VAF&v zXX0NNcn?m#bCTVnn`8NIf+@_&re#iJpBXUjW}2R~)U=FZHpiZeyk5w8meE#m&UMK0 zO<>yFxt87Jck#qT90mS7 z0o^oOzXE-=PDuDmHgsJYWN|k7GC#Vn7&de{P^Lg+aV_*@19wc}t>`2^g9rRV z^QF+6k~Iq1I}W>g8t7D*$cFsQ4F|Unve%!OMFlIGxXL9Mi1(1cFGL5Pc^~U1eBER) z1BuvU#Df($GNA_+cZWa!{5MlXBXRK1vH7gLzyPovcY7T3%0~ty^8S{gXt7D`*%$g$1 zo|4iU817rJO2Uz=hrq*+Gk4BPZD3q>L52n+Lx*DXjF(=KXZaCX`a{|*Ik50$IT}>& z0&?=EyRy5 zkQlp=DhH8E$B{JWkvKQ`^`X2{evY;F28s3_>GTn4_LVJvV!!XNf&YBRO8zV-(91vMm-O)?`2*O{EBQKB^JU(h z!*Y(6KDAj0ljW{oqQw&^(*aPOIcu@)ESeJT$^=G{H2Gs0nDm5Vzg$BujQr0hmzA?ID$-D8@km(131wDNz!NDy(dQV z0vr6k^a-i|mJ#~QtqUUfbV!QB3OL1>jF$HCZ8-M84zf13NKw){UYGEBrWA>XdO+&M zx2?fw8emh@;+sH5czGf~{*3s-Qgd*cMpAC{gO5}d+oL?0#ig-DN}@0PB~Pgsh;?D+ zkT14-0ocg-CBw=uCE*YMvT{kEnR{>G;yogEeG{zDr9(~=-#$wG_zc2Fhsb zgf)sYjN%NVkR2ne9ps~IM?UPZy7Ah<+KlE|gWOmS9=QbjZ3%YVQuNbOt2o|8A?&{V zNRiy^?Sm%EZ1u6yp|8BK@D%veI5iVs8!cE$Sl2YQh+*LhQH#`l~Fl z=aSY<;?LKxKQAKxPNUBvh|%sQ<8dpv>1O2QCShUmI`Jl)=rmn~Awy^3lT1OLjur7} zsBa)upTz)d+=0m3LFl?6#Ab(xyXd`pygoqYJ`$s_iN~S~Ct(jy$Cn8eU+`ys;h{J< zu0RW_)_h^J=DC|{&E>qaINx-UjAnFTFDKJd+-)-7Pj=hTLd-y(&q3#g5r>?|G3E)4 zeLeWjWnUwyX>(~|_)~My(Q}Aj&qnsoCT1Bb((^f^+YXRi5W67IIxDKe(5hiw zCdPi9XK`D!wC;lfg7E{BaMUqa2c!THnG`(uFg5!cy}l(<(J_+_cgPxF8!~tnI``J80#Rj^k61v5k%^ zBi+TPcx2U`5g*H$ZhijLAp{E08L?^ajlW;Za`o{~=1fMpFL{yUsG9Kw8_Af*DU z%?MJJ9eI!!o6#39z9W^_lMAtk;sfFJoyN|J0vwkrZl6q zBC6Gv-#UQ<_dq)J!v`6RY#IiJKNLMW7<+ZFbcfIPiGn@AN4bY*bBD9vk*4z93@qL` z_$dppbC;oO*U&c6cEI1=3+`|PxpxxX8;3l+jD36q{d*Um+`*<)kS&-$=UfW5Ar=9T0S5CP#!+# z;`8KNt245nzT!L1V=EvuPa(a%Kkt1fJW#)ZmzRN5ZqPY;BqP!O6 
zwHU`L#xYB94u7uMpX>5xYkxG8tCi$)DXys$=Pt>%{_Nw=_9g$@zBs=X<+md49k7ICfky?771DX1?-QDIfk>PR37G`2%C_9lH9xoydWb?RUR2lG}UE?w>tW?C4ZznCrGY2WbR zzoEV3nMFU)=ikxepO|^?(BUrs{W&x88FS+?nDRa1VmFvCmzg)`m_ujr|IRS0Pl35b zqvMaF(GM{ncQZFPq3c&;*Dhi1&PSuqL6grwi%(|0j|E#8fsP-Deh)?_^gyR~LPoSg zuQ$P7Za~&&17t`;^5yCyTWTR=DoGEJIXA2l(mCYM2_n~r@gMhDIlwzINZZi+>yTf| z(EIb@q=bU$%|!1{LGF!5298D+4z;c!`!1rNW08q5M7ob5Cy!v`@3pF;x64|atiskh z{D77C28-dk%&~sKefc1UBHMc+>s=U56ZCBmzC{&qodDvIW#CTv!>=j^mkOzj4qlHA zUPP>DIyz_!e&rB!SFkWeH<18_^a+gbB|hXM{F}R?lDI~!=)5Q@qQRez-~)x@LvF!; zTnEm#5}axo(e=eb1p_q9nN&=f1kY(KxYj6qt6}05e&q|ZpLh-i_yi8q12Dilv>W(g zS7;aDE}a1nJc^IDAMR2(KId+H&Tx1{;btYUzw-EXWoc#jz6^d{SrG{z=_o#46u(E} zmqxN}B=}(jaofXYZMLZmCRdx|)y5aB#c^umpVkILti^F^am*TgkB7{6k(}=sc-;x` zx)`qG6h7i;%^{Tn4tcnrFI^XS|c=eSqg4g>T7` z;nxMh)og%W(2OzB2JT&Z^nXV;SMa9@#(o=y4j*UPtZ9s=Q1FO_;1Wy0j8~(-H^Ds& zM{h@hTb#l^JkNN%isg3)OYkWu^E$Ul}LDPS_6E-rg)F7h-r1e zS9JM~gYYHC;&aYamf~fuA+oiVh{!(JX2+B$_-4^$UB)SQsigHtxuHBIOXnrAu6N2+ z{#E%+OJa`{Vq{JwQ85&UB8ZkL zYBDS!2T>VUOX7VJ5wt`Kf&F2d-)!}ht-mP=obeY&{Y_@D>ouPC1t#1FVmR-Jy}jeg z-Yd^}eWtvjJ*Pe5|9#p$-_ZCA0?^FPl8kKd>dfz>|2*vwa9Uaf4BC5fAvZyoaa01wVKd$ixE1 z%nZiQc-RCZL12e4S_UxA`hePYW9+rY|EsO2e@b#co1EXwOx?8ortHj2On*P zEN&|HfZ>Is>voH(viH0-48QebT6?D#Zuz;I9 zyIc66_wYfV5?6Xle!*wjFVP(zGZ?>d2)K8Mm64I+!-&a^Rg@nr!WSR1F#cmPbWbU8 z&Wenkn&_YgjHPDykZl-w9f>)02XF5O2VppT`-xy7q3EM!=%dZ#73{;#I)N^_z}USD z=J|?R2VaRTC1SHVt=3@s-5BTn&^g1%gA4)F87CD)qm*FIxa`0x;GeaaNA+P?H6?b? 
zny64mv`cra&tPWXU?M}Ku`4GNeVBtDT7=!Xl6koS{AmYu4iBJ!&-eQ5szoh#yrOTe|_iKoe$s3oL9`Or#gOXAqYCXgsKid^?@kPZ-}W zMs}=1dTane+r}?@_~o!X2Yhsad=uaM1CYNLaw6X*qp=K7RSN{rrXiI49L-pXs|5swX)N4(el#r@m*e89)5Q*wsOJJ z$n9?B`Mw;jEZdZIzvkid|Fm4Z_u>0YAfu@{hmC8n;C`fl=_bN7{Y7=acsL?7isj9Z2$x$naI*nTycz zp?otPRC6MT<``^&;XH?deA^ps--W1JTaeA>SOE?AwkA@&GFC!ao>noUT?OEbQ9m8u zs}_D#75u95*c~OXKm72i3&_8uJdB5&_(WOo*VBNP+rZ5YM%N$4&{t_Yw#`N`fVKED zE0Oa{7<=>hWj4}(IzG%a#^qG(qVa4w0@)wT2<(VY(;RQ8ENM z691(Rm`7!>kJ9)rMZhNWfs6QHmu0|4^Jc!-z)oa*7!wKZUM92A0?X8?+U+Xk)ObI$%=OVDwfZucSOVC}pu%OXJ&=LWY#Yel20$!H>BC*YdLP z#kMU#E=XQvP%euV@vKAOYzM*KTsb3$!08Ut4uRDjz>eOJu5jg(?BV?`bjCJx#uhNX zjluwXw2+_*98MMA$R*<>YYX=GR=(fr-c|*J^FYtIa#B)rymTBl11%FR3(be`vU3hs z%f{!d$i}SrT|UUhTwG&*FuFqcWQDo*qW|UNxb7){d#l2|)Bz}`(cOt+j2<#iR%ww+z!M(83Oh;62E*L zHp>){_L;O!R6-RJ1iz{wHzF7jno03zdI7TFSgQP ze2J0Z0b`kMY+srJZG) zGi-B)-(5JvX*9|yUZZIzXh*^LqPUYtcnFb1nM~s|%Jp0?&X4l{jT?E-bL;Ub4F@b}O zmvAuJ9oT4F7)$G|K&(45ZqUc`@eAjmr>9%lvD-2;uQOobxyGb7b+2s9X(76z-#ajd z+cKV85)){`d~RsH#*TXm=i~u)!fkTSZh%W%1=DuTi8!$C(_r2w@M$BMHwT%=dzfR} z@q0Hh=hic4*Dx1X5g%9qPQR4cz(P1KVd6Iy*-xyZ?_l(w!SMe9yLZ_uZ?OU25C?b- zBk46UfmcMkUlQ$pNi5+7?K$l!{`F)0>__<9k3>IOUs@kx34O4Mf{7~xV>|Yt_5QE* za=+el|Nieb_u20O`DG8?#~6&=1QNh;pS#aBnrj$?EjkWNWg@YNN%;1YiDgWsP3M|n zm6IiwfF<{rdrHQVGsIf%b0acnn_Et~H9Oo%keA&v6yNzyBjjQTQIxZ zBQv@oFZyEH4`n=z1EZhL7?}@OeFgEU4dC)S!Q>BMV;x05oQBhL2{~{Z|KJI6uD4)b zUo3ydU;uNzDzUI2#%B}ec3Z}AcUTqurOM2TAZATdw|_E^`LGJ@v;})=5A!a9nH$3_ zxB#|y3$6BqnerCw?<+Gd5g*9J%JKwH%!uyHO?`^O_(?9D*M$k!LaR2!Z*Rq?&Y;eH zu$zYPZVYUbDQM_WbogSl_G)aU&Dcrd==mdf^3mx23wZH2v78=ZHNC<{_y-&4JGMgt z|C6yG1Rfv7NU?CzV9jK~V#x*P+!wp21fF^THcVAK#vr`(hWs_>uN{Bgi0t;mhj)3K zWAQqt;Cs#@&O1+;N_2RZvIM_$1u@|@yxYLLEy`+q_jSr1*oz17fL)aZzuC?l(eYk>5)pd`vi1-;eAyb0-82|k+ttktS^>ru#+;mtA3Oqo z$ffH$;{!J%u3F1_jxBTpIe7;8co=!G9h+noG9wI`HwjrYMBD_sJxkT9NVgvwfJIYG zECy?wCDJ1SRidMbW-xwgXK>2q>Jx!gtSYPBOBaV~niSY|dy7Fw&TP$PMUD z{!SGf{A6$C`>*BYa~|@za`XCs+9UF}p0M3BqX7H+8n4;^jZu_imf+Z>I7eBy`W47e 
zsKR+`kXICBIJg#*Yf`DrXE(bTsmU?QKz@IAbBK}O9Kn5s80E-q2&86T4QfyaanB9R z?c_onFxs0ZjGpw%VDlzyx$C%1kB)Lm~GzjTjC8X&PLsMewgSf_?9Yb9%~b1+S?a`1&9)i1FYFp>7^&8m!mc z;2i#L|NjPj|L6GjU(i*5(MK||@U-aGTTg{91=pjk(wapQ$rm(jcOJ6Y;I)<7=%$itm8~ zbOJQ(3hy4{w|(MW5^}(Xzn2;ANN=Y(zm6G*hnNXgd_D($uRC&@yRNsJ~Z>%-;I&?VG~8Zw(*6gH}@OLUr#RY~jie=%WRa zBT$tZh^{<=n(*^$!PBn|$Gm=o`CTnY8$%0WyHQ#V zZ6wDT#xVzL<+c7?MIZ74dQ!8$o90V)L4IqG=|8>JCD)c15c+LzmP*la%K9=SO#BQ0!=pL~5_U!SlbtXgPsTzYBkUCBFPD zP}xx+vOPgnnt{Rwg2VW^ZHav7g0$eSCa6w47XEV>q*t*=juDmEj;+50n`JUq_h9^t zj@af6u)`}L^L_DqTsWzM@0@^Se@CS54m0R95`8b&=W2Yh+4yQBnN!`d%bMcf1rm!X zfYl#&d;N90@4wnXh%#DTXjn$e=Eb1fiss|>B9=ZHTWVny1 z;HT^4vwnk5`O@rw%&w0dsX*RVVRH{TTI)@3bD^mtcfJ{;%;!d5^E!3%V~i%`W>qKW zs0_JBE}5K_TqG}Yl1wV}{3h4+BeL%~IhA*jkC%;H#u@S~qYTa1Yb5JijCg&u@s@gN zkI09*LCusn@}-XJ1IVH3Y3!n|+D5&Nu~KhlEFy<+X zHP?IBcNJ(Ad0(0RtFZ1ub@Fm+>6tm6k5O08#j*1l^>ts?6ew&o*Qu~bo@r;j9P0}N z8vUpXHP{~cxIQ#Nq%S+H9t^Q@(<5c5$(JKWx*;MbQ$ApS@*{Ec4V9U*X$=7O25ClBNxcte~!CfElbEd+T#2VZWD zRR|A1kTKgFyT1=Ozy!wfV(k3waQ06S7r1U!As)~S8^0&`=_oQ6=eW6|E*?xrZ1-VU z>9gE1xdhNs531s4hlf&J{w`IAH_`&WLoYDS(XbU}GwW7io9{--pLFMSZNsKJ3Ql(g z%<;LLUiksGCo`WjGp}8-u|OGsyDHET{&F8iBGA> z^2}77E2I`yE2shP{H^v#^1eu$F=TMfRwp2p=8%81jJ&NaWNYn5R-IH|sF$crcTY`0 zX4|!|$hRb9wW5_`Wtl2k4l=R|vD!i@tvAwe81mPZpEaA@ti@zVtwmCALz?bGDn}xD zW3(CCMJn0dqC&%C)@6824cm{}W-VUZp(RkWAz3@9nOcOV+Ky>nwrHx@p4KwhVzn&f z>HF9&QuXe#mdAEg%Wu1;`Py!1eyo8|)OJTJZo8+Ipp~@U*Zke>9CElU4Qt zZSjD6xQ>=Nhvqq%v7`2=ltoo{L z&;dc@ew8JnSU~-xWTJ|os$N4!$Dpkb;O}fy&f#Mm#H(1(cnC%QkAgYd6CbP@5xc4| zUyC9Av%+Rol~0VQ2iOU5czFl;TaR}Tiu4~X4@Ua8Mf%qSn=OGKkPW0(VU)(hGkFB7 zG#1QfFBr}WqRUf2`TJwNwB~;`Z2cl2OPN8NEO4){#2OxekDVbF77nhm6nt$W@vdNe z>lR?3Rq(qDfsLeRRynb=J|OdN5<@wP4YrBcN+@%5q_{-N@>(a5 z_d1Nc`##9Oj^yDtHD(~kLy+ME^%B&w&X3&oam(u}RB$V;%Z8usWL2;PJ&XQTPtEER z9{O|La6ZrzoHzAv&db!Ajb+`5llo(4q<)vWT-Tj@sClrHI-Xlu2WBI+0@hNaYb7;j zm+3p4i>W)iP~SwI?G4ljT}Lf})y^<#fzGAIKqzev^~~o`A0d=Bmlj5w$9MDjZ2{XZ zWFOc5OZ08d6&!Odbrm-0QO+HDG_?p~oe}y)uH_onbjNu~f5bJuppN%@>JogVzUUw4 
zN3K0yx9NZNw7RKh(>;uQ=#yfsOA~+&sfq4rLSM9JWwc&;V`B(9V=OtwGxb6A)fi&~ z^+dzT-)G?&_~GkR!1t+d@?pz`c_zWL09$ZU;Pm+n81Xs<1%IJY6U`Rc{-$N`|tk@?T!TXMoOL!6F z`6)W!2V+`u=MbC)-+g4&Brm27_)dT1|3r8ii;0zOC+2(%wDKx5H{`yHd!7NTr64w308yj{AOxM5 z;X{!5(_q*y10mS~-f#?C@De!2V`}(+#!hr9*;qNExSC6?j_ux3t*Z7>8zWOXB3t?+ zW5ywYLe*%j_Uq~neAquTYw4tC6`nM< z1vYQi%JASk3g;8rL|d{p#`Z@WZj09j+CFK$ZSSZ>`9f=Jd#p7@r_{AwXSIzBT7c~o z`X&k;w2zh4wrQDIhr!#nSd(m_=#RK%ZJF!FYo$ zLF%dzg)E9*$c$dF!1%tR7apP)V$lkF(F-dXjZ@JJ1JDbt(F;}43x!~1q`_)Q!dHER zUbu=s5b5S#9L4h44w|_HyJr%ZW`B3CKsJzk35)0}c<2K-e`mo;_u>nzB4#!n%x4f9 zqAl^UYGAN_#BtJrxF+Lwzai#zmH8OKyxaf=JO`W2g->^)9$656fIq0P4;(ucH2Wu5 z&U5%`m*B4*h7-3QdvFfez({hax>3)hG1x&RY)4=7IMr;zks0g28s@WF&_rr73^tE~ z&ujymSqx4w*(l9A1No>`n1Q+}3hQY70Z;j0$)ACU{b63kMphm z8TtCkd52ofm#N!)mU`pIkjDp+{ky0avsvHiT%&KM9`riaFkFc|UyOVYbIzi^%yjC^ zOwq@)c324YWrjOL^uf-N`T*xJYS9d44Tb@FcV|Cp;`E_DO>ey;^=LX$2fHKnxjWH1 z^S%q;b>a7J)T{2n_Pwb~-Pbup?@ztRfzHv?)o~qn5;e`Ia-NygqYmY~^YwY?kR{Gl z=#=&7ldaUC4rjH?L;5l2adg*N{WAA*hxN;zQe*Nxx+C8Cnc6PLrrJc~J{its<8lP!8bte|-Bn0>IMhP+z09xdGjO2pixW^&=j#4%%m97rx&yYTjHn z^P5jtRpg7=fU(jAzj1__o3T@h9J*R?CfiVRuAg{_?)UBMy>37zcmkzGxvn zqC1koj=ZS9mXkQDKe$m1{MT02C-T=^m{Df*M=>G=HOM<@O@8nI@Xjg35tqRm3MXE8 z8m#EPRh~FOeQc+W#Qp{`Z>BJRmV&A7AYOJHob?*fuow8<-@*Nb^bm~b3-NMODi79Q z4@<2*bF&{vzbnsl9yzWX;65CbcQThxf(hSXZoj6|OM+~NpO78qRuSfXMf`{QScz@X zCBb0wqreWPqfM58C~QV%90cb+jXt>v&+P?h&{rj$nvDHvQ_F)2)ddx5rtQ&}^)~=xy7nrL*l}g`@-MpCicWDD3~^S|Qs>{Dv5G(P?D(S?Yep zX=Tt&W$_Qn*)Fjv=4IsjrT^MRK40Ls^X~2Z@f(UE`wMf7Lii2^Y>`@C+hHvidd|mv zF4sA?vbNj?e2UeYVq1nDT!3#AijOlLpJI~s4%F!d_wWGU=sNo0JP1<^_ZY#w?%|%d za_?*CgGJz8v#5zNp1v8TrJ{di`pKa-#iyu=Pf?Cq6MouF@UwI1?Bn$BKI&_10a;rC z$~Ko86BE@KbtpQc2e@2obva(&ED)s-u%&)rOC5NQ4SAk{_!Y%iNz|n~?CNb8igEB9 zBe5iQf(fo-EX>8H9FHa$$h*$UI7Ur>v`0ITr68CoE@n#}Jk!)l0t}OPAdz>`Bys#5 zA(phoJsRhsNyfT)e%%?T&9Q!JV*8XPPL&UieFpL>Wa^zH5Mg+SpK*^k!#N@g5k!Bt zVM8q=#xRr2vr+KV`(R(SB^F*6ery1I*+S&7WI?ysz?uFMt^0_z_5=>@73{Q=L}B+4 z$6HUH%>wdnrdS1u`(-2!D3fXPix}Z+eE*wp5n_lZ?j=`gEx5p3;*evBRrVw1*$%FE 
zJ+K2853VrLNFSn;ULaCBeD3eahL^+;Zh=3Yb?cr0Y9JJK*H@TrW)NerMF$W+=*$Wk zO|eg_gI$!uKgn;N#b%A78gw|g#zx`=%gwjOTx8Q!^iha;+8BhL+lx9~9nAIM;Zni>4H3WOF!K!x^4JW?KA8@I7u>McrEpLqsVENwYEd{*F z(66%s-equ^^Q@0_2AnFI`a#F=YmONA^#jIjOC-r-_g5_+Yw$FNM0<8hRT1m~H z<;Ef8?>>F8+kW4rFEDoK^NsELJYySetGk8q8fI*vCXuUc=Ka5I9-rs)+XA*-$bO5# z!j^EHrO5Q<#(rv-9i(Q^Va^qaUvQM#K_~FJPEk83R^Lr+vwi5L?avhl9%-`3mYD0DSW+3)Iwvm z19ZDDYrB;ozEU0^qZ$}gJ#z|uJ(oUTMr>juHHX5D(Pkviy7A9RSX`Q9vz4_b}bL?d#v+nMR8L6id<#7|7cCRqSBwT{@tZtRpO zFw1jr9q*u5UXx?}1B}WvVG@dc)VDZ6ZUNO`8GH4K)3?X&=}U}11TGHID{|tfoCaqm zf}FQlxP`ai6ug8(_?_BBx|jg(AQUdl3UU^=f?FLVU*eS237+~uI1UrwJIsgoupZvZ ze)y}Wu*t4l#o?(`hM!W87_%SQ+?zb}Q4l?_qiC|oE=ap;( z)7tOOMRA~=RCoT$XRs}YREUa(W#ExlBR{7hbG#k8sTUE(VKDh7qMt(H>Mf&6!bWtI zE9M-Dt$7wC`Wj5Lhr}@7fO>s}m6k{}!y^0E3zl&vv{oK6c#9xw%D7|8^@)(SLJoCB zfAs?g8^QkxXfYT2dm-`U)!4jSV1V!CT@;m#PP=8RWW&CnXCwdu}yB@kK5c$F44;DpWl3#Dhu$Dw7dhJ}aJ74p5rhNY#Ak!Gd^I zendoyVx1SK8d6E@^3tICWkGbxsl&nBMskGF#81W{ZO4P}PsDzojAu3l{WO_jt_i?NVbbX9!-SnAd&2C zcqFTcf6aq|I2mm+0>r8hW2-$Qs}bD7>clL{z!&!gz4Br7rG`Z+@&7M*R9}f>zb1b1 z0R3_m9>p1O*+{rcyBX6Ph{Y}8-As5#V;S!Q;YoFckI{nYMjh<2O3aB8%!2$xIkMo- zcr!CBcpZsw!oQGb`HC7Y_o=0Q1rEp=xM7j-Zo~0$Ho_lsC@xGRm|0M>>t90(>+7Os$sBjihb zBsX|jR(N*l$#d}nH&e)aFAIzCIbOZW6WhCekL*#{cj3%l&==zfW|xKWz7h?GxGW zubWRkony=t4vv{@&Y=cH7{^|~ITmxSrJQp)=UzqpXAL~`^<2|N?D8$}(znC?-bHL^ zFV}s58Xt$j{G*Vk$KkKX5H~s_%454%LcUfNSI9NGPCV%r{csQ7`y9#8$v9AY>eu`zJPCXzEe9ln1k+@yt!h2?O| z))H&pVpV`CQ5}h0myy*3&SEP@SqExdbVIK9CNAOfDTXrEM#D!X`;DV~fm88=IK?0Q z>}1BFfTyHM`{A=jGFGFBUBn^#FXMaMA}{FyWBHjoUR{YAL)F2ug53FhDb%cU<@jlg zeJ^;5X_*I^&;&mC_PL4o6~Nms3I##`XWv}I0p z#DnR|OzTNij9_qr{%}VI5hEFj78yb0dla_b7`TDsz|1D#^GqhMX&P-N^K%w)lu+7S zcqj8{3*Z$lVg@e(QCo(!xDu~u753s9H&5y!cH<@D=~p<@RU+-zupqB<#_M=fu6E;p zx9z-leZSqk%{8`l?Q;n!aFHuLPmAMNXK80>r)e>?Xxd5IaoREBphsy@aFQcAeAAJs^D5XgE4cq<^yX6f zVlmQZ5&g1&{+UNVh0$N3a3^QM*POvqm_}bthDA03_TyOi1R?l~BkAj5VDdxg_d#$e z2f)ec2j3tVJ>Cn8xd--hH+Z03$Z+Wd7oh`^tSwe_E0Fu<*wsz&JsZL$t;ch%1Ba&; zGOs$%x+=J4AY99eVEh5pMJdNv_@8$3 
zwoGteGcc~wV`rpAW~OGGrGl5~4L{aSEeH?z);99tRCseTF=LCUfyv0$VdJJy&A|aH z^Dm?Q4=n*{ngClkA@nsI)XF@K)DE;E|1F}iQD*Ink# zL+0C4=HE+Z$2(@qM`q49n8H7qZHdh26lSDgYfZjK4&!5Z2rtNEd;^C55x(PBFp!^c z9{+%+r;wK~qyXy8R)#ZEjr@X|*r0W=0~(M+))Z@?CD>(KVyzvqNxNYu^d$Qrn7on! z*r$VGzYT{AH;U|pF=U>N$3vY2<~bGXbq0IPp}uVxY`KNl8%wcaSHPZI4V!Wudu?#z z_95`>#<_D)c3_+AqO!|wtdwx<+`UA0_t6e;ltUn@hdI&_Y?ug+7D=2piX$F{fpLtZ z9>=~pNjxqFUe9TceijrqmNUd*C!ZrOaQ^?>T)%DjudQcW*FLU&UB_67NY}1KiOeT^&&RwIeK3fIRJJspi%Jf)yxJ#wzMOXfZFBoz@EVZ0OJY2OA z($n+ai1wl;QsWxQ--?5x9v zTZXKj$7q?393ICA8b`Fei^NcXu<-HxS2M22Xr0{FTW_n~}`+zQluE`Fkz+wJyH~ z@~c1iLILs+vZ9ew!D+GJe7o$o&tQTt;Vj-GSK|VF zM`DNc#Ww3qZNnDWV|BqC0>uVwvgO!fVc0fPMOqltDr}f!vp3v^j?}Vk&iXcW$sex* zR#}GJ?IPrIllexyvDKzGoCt~9j!AG~;?0rpCi)X2?QT9W+rp`9jQ?88JPkH})GTRI zUDDic<~G-xna$<+wewAl*r{Pmf|C~lMlsaWQB<>cVfV z$+~`3sMQ-le6_T>1)O5N;b*Rb2K9L50nkpsbj2f2?W zj3MA6L*XY5XQi!C;3Q)??*w9tQ>ZmO)5y&A_?U}`Gp^(sH*l@nx#qoae~u7yJV7kr z44A-0;sV#X*L%bWp26jOOT6FZ9X z1<7SEN3KF0IPI=nhQ4T{acI>r@>+OqYJ_;7aba;x3 z)JY)Hp~O9x!?oDNYRh|I=tY51pJ7FjtMD=IsZJuHn)VMk^$#@%SgS9*M1OaE;&iYC zv%yDn@c><%m*T`k%Tf>6#Tj(f7On$M+mPD9&EUVZ*5(uQTLN2gB^;P_#7H;6Qrt$g zZzo&tAzr#4KFJ|ihey~m53h6;7RM3bd_=~Q%^fUO2H{h+DhQD|S4*eD) z*Gpl~&je8j0k!D|zT6RfxeuNgB;`~Ug4_lCc}JBVAQ_jz1*-K&p}BUT zotA?v%>-K-1wXbI`luy3s1`^-Y3do|B`+eaoCt^UEisf^#4Mxn4R&K`ti&dtjx{m_ zyQU+Sw#!C#*}{1k`QB&@2R7CR^ub+hxHI_m`|z{YV2#ek@^jS@>%pgH_-s|(xo5%n z?``qr>LT|8$YU$SoXiSO-4l7AOitZrWc^dH0hhvDb; zAlB3Z?s9EvYLr8s`+^x}rLLnVb$3&cA^&n3JHu&gij1jYo-oTGy9=6I;k&Fd?Z|Bx zukn9Tbr#TRblu*^W*~uR-QC?&clW05?%t>F?(Xj1)Gc*)cW>H0bxVSb0^bkb_uh5C z#X5l?!vHh;?ERO0eiO43z%U={oxInHU#E!Eam2LE;GN6RRm{}05Q9^JCx?2wfseK# zhBnZD=~dC{mC;|J`+f+gb^{#wf*zrtr1$(m)?E(tao+;gx|a2qR|I>|zX#WS3EulC zsCk?OKQ|8Cb|gBPVf2{~^pEcYKiVDKwPxi7aC;5)xp1YkgL=qX z`ZTQudYbC`BzVAyXb&ea#-Sw{3zzzzQHkqH+*e5-&zQjdf#*%)HIsv{t;sg(pi`{J z+H?){`REoG@m|Z&U9RMP*P_AP$bM|4=lw3$xQ#={cm$1RJo|eAE#q}<3VrKmq2F8p zzPy~zw;pVH8@TyC-N)J*R{ac^@@2l0TWIAU1$FkUc0#ZIpL%WPs%FeP9hhr+>k-U1 zX_@D;qwg<-?!65Ba#dD*Y=FkJCBH`(ewP9KKBM8Gr!kK#;&)ri9I_J}`3PF~^WG}n 
z+ula3s@;LPqraDKob&;m?G5p*;#kk)It2hfwhAUi%3Fb=SZ^!U&6;ggC1hp6qh!IWfRhA7OjTp2}s zGkE##U=Jg}t!9z;tj1fu2hY

Y97Ox-AX5$2`=2rQt1WQUSK0rtC)DKa`4MGUwD{ zDuzu|u?NWW&ruQFA*X)>$McifLovAN<&(ab7SQRB@IYV>;1)qOB3$H~&%q^IM>w`^KVO}T&9$kQ0A{!`n z8s=*U2)Yqe-Ocat1Mcx%uiU-80^&0=mHbaFCj4Gsn^rs+1|zZ$qgByHTgaQh8Kk zb|{XfB|HAPDC0MS5$|~tSxy3|@O==Pvs4lLs36u++s*|w8H>WbA1FvWYKeN((B;59 z^5Y#(4~7;(EidE#{)+Sa8ClmgDuUyA6tR@)rPl;p`S+{ zv@U6=3j&^M4;?nXkoi3))4xVNZ~{zn7hKS4ILNu=gA?e3KA5$4x`CCp#KTmF{J%VN zMPYgnXCwDdfe%K+52MmA_zyY%M{jp{$JX$U4Tvk%$o0#3_mb~#Cf{F)o_an$o2hV- zqsjFLdz1Cv=p#DAbG7n5A-}(c_Udgs zpC3&wKMbw>KwTymcaod`)jJa#+w0%Z&wtTdFt;>=mu!q?qoMv5J^dTKE^|#C{U!19 zMR3&Ox~BeI53GSwo%?I>crBh+o9EXHzAo_kPkJodYX#5O7QV9sy1~x8UpMBkUepl% zsV4@L_YWuMA05p1qsjNvknd*->iSor{oF{NzmvFp2)%ziI+07P%6nUH&S&m`RUN-yU6q2&1!z^!M4RWB!IZw8l+L-QIB zMt_Z5{}FNf9m?0AJVT-qFo7`#{I6-@t8?Ia4fFsgOU_@Do}*3S3)rHRuVd!fo zlK0QS&$@ydbqm$#0hsSI9FMo~u|KD3{g+st1dinehYEqeNCgg;4UDcZJweI^aiSF9 zg4w`@icmL~2VbmBEfC8&&is}3!Qz~A<+)WIe4qh&S_|TSN6yAx=pYAy90ZgC6FFCBakef3 zD_O;vy8%pNJ7;km=)@7u=+pRDE`oAg#q)Rzx9WW|!e=;JUc(}Nq@%@GYKxzAB}&Ax z>cOw7p?x;MCR&IRR(zC!uD=d6l@Zh((Qu2Y$T8EwOJyRSWMdY|Mc$c@=u!~1ePL>n zV#JyfC}v8Lla?X!#K1lVIIVIh@yny93#`LbiJrxkiA$B3eJauuxgt@jB8rKMuwj*g zqXW19Gb+GZRtVk>Jl2j8cy251YsvOnu-#a;A6PG{F?-XH_pZ-A)Ma04Guzc-uB*X5 zRwwdR#d}+sPf?LPxjg${7JjV^^I@rAcaYRL22${B-K6ik7%gXnVk?$id z-%ARU*BD_^=osRmGjuXtLw>`(en)-!8BNCra_)D`wy*h4U+~>NX7;@gQ+t~l?i#c4 zCGI&#ygfx-cPz-+1vG?bz>`h{f30o&UYq%EJ@fc#vh@|Pc}s}N3&E!5a%&cd(lq`~ z;`bgGD8vfti~}^ks~Muw5cSQBINt| z$@+7mQ_RfYw8ZsjZaHw#S>XdsXiao*B?&J^lEHuvL*Zz3fR(PGKP7o z6qs*udLtC2zAwo6osa$#xxs~VfY)aOug?rtoRRuJJ(zu3FtOCkX(_?#Q-CW+fsaLk z4}=pV9N^9&V9-{uXaQW>0zPd5t2TgH`-m<&xV%bCk-@k<;O{Q5ZzqUZ04x8?Z1@MS z{BP#SUpOd#f};H(GJOM?_zJ`InfmGzRn zlbJq4`CV6<7iQjqLVlRM;qwRwI2j!A8~UJo@Ca7XKz{)M_a(h#j-a|VbvNF zMH{kL_1UYs!TEhDobF<2WAX;_9!sJ z5ok4r`|}aA177EVJ}E1ElbOBA#NK3NZ_?qMNQ+-06?+rS-bCVs4x{T+2swnv9+}D1 z4eX5%-Ma3piw>l=U%}5L!5h2qODFj%p=GZ?u2CK?IR-qkl>ZC-m&w>pg~_en2gJ7u@qE9Q9TD zYh5JYImf(r%GZHDU2UjRT7rW%MRVB@PM1!Y@YUO>Yc`?XUk_is%2ya{G#~X&4l3)+ zY&jiUPRR-ok#x9lFniiyZ!NI62J$5hY)xYB4R}a?qwV}go&AX(n{T{z$)Rfa9(t>Q 
zqgKFMQU-laae7=9V$B-ZV{b0JCfVqZnHhg^de(QKb0zrdW;9;w!SdFCvo0s+TI@A} zxB95lG;ltdRpDT>@g$53Vt+sJsC;9ExX(cxb`W`4e{zPttijpa`_9`FT(&#@@~&u! zI+L$;^gi~s_dX=AxW`(IcfGBs2U~b=cw^D5HbcwY3=euU>i=f=8vpNT%6(0EtnvSj zYyao=6>bL}zlKlZI^zbfy~TQIcks5~Cr5n9dp+hopYq->z=_|0UB2^nV?TSczkSd+ zfdaEG#ZT74`@`o-@=oS6&H$H~$LC%Gwz<-4W9=aa-%TXmi8Vw-_|AZ1TqF;= z4n}#ZC7o2|$J>~ZhqvDBWH>q`B5)Z$_uc>Ht?eJ)s zh`)jLQmTO&H7ADmgpD2rMmiUabUmEJe)x&=RJjkRaz8QGxWPyTV-Dt{5_Ai$&Wshy zEY^*gZ5VmZH0Hk*V57U4FXNd*Z!z1xCU^PGTx?)Qk48C@oq4_lxkXJ=ACR%}RJaTA z{cS~Ub3CZWIY__Ai)eKo;|=>{N{y~3AG)OqR8~#V19hkF8i{f#u#)l`)SvrMBAuf; zyU%K4AIy8G=}u9j-eN`N*Z8o1Qj2Ql`gok%Qa|=({jv}@r_>N zn>|V;zlBPF0bceoe9yh8)nhFdYVu@$hxg``mfPrhPf%}fW#znu=&Hx@YxM?ijis)x z#4ni-H-0K=X#;4*4_45A0w#VA#rAG0=%pyO#`8CzjcY;&fO6!F*~uL3XzH9O>|T>E zUm}OzO=i82Ol$$dgOn~d{iCUl$=Rj#V zCr+aJE6&$T%mKUUN4W&9 zYYcI`EBsb{VnInX_ZfUAiKpwRsV0GS_N3mb@7;)gb{aTYUof*~;AiE)(z2uNw}Y{{ z^oi7nz3^o>0jn&_`o!7v>F8gFfLC_V-{MERfydwwYZI^2!u462t_{_`YVEYgT5avJ zR!TdfWz#m}(OaO&_#HmugLtI2)6T2)@ZXiidyx;%MS5+FYR6xptF2f=xVHLUjZq(| z`PHjxCN*A-RO3{Ox>=Q!mFh2LftsL9SDz{4)Z5BG>P4l$dP3>0#wi`ttx8LEjnYJ2 ztkhL!DK*r|N@aDdQdS+Ilu!pNh1LE_0kw~kSM90fQM)O*)Xqu{wIid0l3i`jb-VwL z_B_^s=X6r?GYT*Yt6h|0YImiS+FL27_G7z)mFntnr4HL~q)t(qtFx7Myl+=^wbEDJ zq6|^@Dx=h+$|UuiGE2RwELI;YYt?tkcJ+sHh|h6WHLEw(2=%F&LH)qz{G*oRv)5Ha zSYtU2K9QXGPKx6Psfxd_DgMOH+A(#Yc1<0xy})PqU0sJ?aGw^Ton<}ads-PSL94Gh zwN9F#57W|u(-!2nsLF5B0_?R9nCk>Q9ZS&e?m)wQ3LN@A{Y5^*1*-aZFxBOHPB83p z-YcvN`UWo2txv$iv7CB5F4&W$HS<-V{+`WzycKWgS$MT)`2YUWUo@cE&IyK64Lw+U zG;qV1FXsCcj=pF(*@F0uYLPc}B*z-Xys(ITZV$7>CHUUgAp9;eLI>K6TyOyu$(dWA zff_(I9q4(pmNVfn8iyOy!0)*wQ6+>KKhp=^XQ)8DZ%(z_pDK4MjMy5g+Cx;c*Qtlz z;_FVLhO(MuV-Z;T0?)1{jemLaP~bTzW?Cd_M%gbMA?}G zUZoV>IBJ1WwnAsxC)kH$Iq2jr(6=*SbN4|dKY-3T`740wr3PEf1NIjKhgBC3S{q_c zZ&1LIpo7z3gqFZdZG>;yhx_6r?u)CiNe_wkZ*V(*g_%mUT()SejBX*ehY7DOsflq} zgdcbzlJHST`1-tf}v;E^Fku>D0%>QL7QIRXM~OZPCoY#d@J7T0!4hs`F6`Y1YEHV@wu9Jj+f3q zU}c?&(e>!-Q_6b}e*Gl8|5mv9gjoN`}+mgopQ@Lr|xYnNyJ_o~b#b@5v_)y}GI@yK<@^F2U&t&Y^b;^lMV(e|k;wNPy< 
z-s*!|PU2<>?WR^ud!{wj{?)qR862XS;Jc#r#mr-ywc^Cis`xdUz*%+13o(TCp{K#u zt)SP&9_GNa;H(e8ML+4I&_^s_p4^VN@w8qVJwpR@3|;UrjPQcuf(LJ5J?oRy9rw8B zlXoHb@h)=si|9#Srz{ zh?VJD(F`8HyJZ*g;xx|PyL7gAXW4`2>=b_ATf~}|_{zTF5tA&{h<{Cmh+xbbDC8qA zEJc=D1EjNNmxs6xMi?4-B;#;AL_)e%Uz87kU2|~@_c+d4a##`=t z&3M7%&xESt6Q2J-C@0<#N{ctx))lsSUdYY+Wa0hNihG4fahqTh*9UWDH+l6>VXBaT z6Zl;)txP`F5w{ZyOA6)t(xo4n0Lm;%Ccd33$&>nPq0MPpl+;7hdf+y-7Br z;g|(uG=v$ijX6Wm!;s&c$nW|J{_YCh@%N($S;^mtrsd?i<3LQif`Hb+Gg27mdj>)h5^SwG;RSI4DdbZ-!)L|I550rC>BSfeCY&2 zSCdOYRFNrAOse2rAHl9}foQAXvd#?<4B#3*H}7SP&Vo1 z^kwvjn?-Isj+jgL3o>RS42lFt{u|!>Bk|;=7DN0kN=}v+A6QoUHKc>bkH)v<&^M5~ ztggRc+6E0%&-mmaP@JAIW3 z=r`$6^hm3qzlQh!4A1`;&#;PT*n;{iLLY%&doum+=CQ{7D!r1v4Q<0Ay^Vg3PkUP* z4zDx?{%8?3!v?;uedxhX>zU!Y3VL6WUw+r?^1Zf#^XZAVYdD&&>2N(Os6TdqiyQ|7 zxlT>;9K7S3cN=l?1is~nvKq5Ex6WxDx8ZTRgb|a zzJPAYtR!QnPiZFbxgzu?tp;xsiw~qHnBge!#W@^H>$!CRT=No<^a;r17v^yV6x5Es zHxn3c5%Arr9Fxt^ba&_PaM0qJ;1H`hYWILXp8==73zzvG1Ur#@!3gdh&DuuULBor4 z%vS?DZwj6sK-UM8WlRK#Sjf4t0rh7bSi~95ms{ZbFUd;21sz!?_<~3>l}vCA1z;4) z!8QcESk0*~yTD=$F!iL?976Uwj!b6;xy~XO+_m_Gc96v$#7%q}#{CNU&|M<)Q>ppX*lC@JW|n|3w}N~5h0Jc4t}Tl zME6EKvoXG_Cd7y)=3@WfXv`KG@mK@c=6Z0?f!EaJ_0_;dDznWB__$)wj+La2F2Z}} zr*B41`t4)^KS{^_q`)T?W==AR)YpN1mD%S++_7Krj=dxDJjDrh8?O2ypYJ$IroHgh zoA~rA$nxg!Jxt*H7=jBbz*)D1F{;gcRSr(4AbDRV7?=pEalm*}67|5xpt9*H%BJfu zDeABz1arGTxRuE+H4}LKM1wFa1ltqa}aJ+mWG%dgkjn;N>H!dU^!$ z?gijvsGu)OQ`E7 zgUJpCgY5+F+K8T;6~O^n@rN3JJGI&}@WUz8`9r)f!5nX*dpU(yb}zMlKvTH@etHsl z%24|FfJDO4KG18yNyiZD3V64{=`QE|oeAGN8veJR{)1@t9*+14TBaNHYCA(unS;cG zZF;!Anto*S>D4wBAM9v)2n^C+X+4Q`9ndYc(9fbrI-=D?uT%-0P-%2Zh0ze@(x=mp zZGx6cA4UJPq4ZqqPh9Gu`KV(Ra#5GooEX@c{yz1Hfwk!IQ;j;elJ-_BPy8!O{EH#} zm0^_rKi8##|G(tES3DkgUf}ufw2Hj8vi4D{PAsj>_Ug0U#@Zj=BboQ|XdQ`--8F;W zM-%ly#Kqz4*ElUBx~Uv!p7QBSiQ8+zIkxc`_M?S5p*2I>)DHh_54^sEi0`9`mD7lo z3w2RntEbR+;*aHft5BETZ0+E< z2l!*qx7Nce)sea;z~#(<_gUkA1AFtEsOiHQ6-gh$Y|Pgsg6mmlL8DZRioH5)Tyq>n zJ&BbgiIua1zMfmm`7gn3zmWmRoJCgRWomNK+-TNI6EABLFI%Ew?}@T~1pMR-6!0s! 
z?BF~-3Lkxiy6q7R=?9`^fOq!MalrwWhNfx zBDNI*6)Z`PTOQ3)HPknCs7V`<={5&7Y)y^Y0mQc}I@q3QbNZp!8B7)X4?3nmFQ9Q~ zIVYJ9(IMa%*;c%H9vDIZZ(M?UX*pdiSDJ5t72iVNbcYT!cX{PKaEAM2XAk~&1hk=n zv7WJ(v4*jlv6APl;Q4`^ZYi%{!giLhy~XH;7SdsE0o$LCvS=>vHJb{1Cht3)_n#8f zm>$FVdWimXaeR_pWba#1W3318SP81Vh+KX)PS+`X@^L7(Mg;T3O5h=7z($LjyAaLW z@!hl_nl~nz*QE^wccPJX4hKvHdHf`jV2C!6_dFn6d z%U3Yfcl;*LKx6JwVFYxmXFz9;@yqSwx7&u}bv;B)=40)qp3`WGB@^v zx$g#B-yVLyIkCS%&4gdZJ)xuYJfCTEDm#p6r zSY7Eh$JZ5Zo#R&EhzqQ+v@h7n^8g(CZdmoL9FgnsX#_fZFGdwR7wl;UbH^l@hOx}@ z!&z&1FkPVg!9Vm2=D=;xU9`YQ+muTKF15K-XLhNKi>5rsd>Q(ImgEd5276JMY`Fj& zdtT0pT+Bb&Vb?Qreq;zL`BI?nhy*uwpr{JrB62aqx*I?&5Rk&Q2XucDogQ4ce3HPH z|8lnd;#~Uy2mcME`3ubZCv+nTu+Z|lCp%mCc zO|hBFCg$XgFhv`{57&d;tRoJs1;ts5Rv>_}uSH1^7;6}-sX|sUR-hqRj;3-cT+?Fa z^93l==5m?EWjbnuDX4KKz(tJ(2^xu>;2$!@PuP#yGP-tU2KtqZDy4&;h$sk~c~ z5yVo5HzpD`fX}J}o?n9up$b}p@-SOvsMCw1=q$)8KzXQpvZEQui0Umhcz*;4sEta< zNK_4AOvz{fe!~6#3+MlqTK_q<(F3^ro5bSF*@%zBscczkR1uxzh2B9|jaAl%_hWF>s_d>)y~Bcs8B2Qjm1sKH=WHI@D1AN)YLdXe{scP6~)1oE2Ua2WmJTe~qgwgqQt$~;-yyN7-eTk(sm12bDr zUy%9K8`HtG$J76M1Ul}4=<#|{k8~pMY2|$Z?)`w;&pZ+zOtg88uJ?dL7n6=Ur953l4Wf zj{%b_p`WKdJdL*bB=yr#>cm6zxs5~fyh~51Z>Oio7W%nvfLB?o+sLN`uqHD#rAc4P z>Agg!GDIuANcaACs9Y-ynfs;S^&+kp@_%3i?q5VMvN$+&uD!w62euQ~-g36PlA3Nc z+Us?^=LX(;6Z^20ec8c2?GEl+R`R=?>}!5Ko_#;dXShTzbDg^HHu^!nF+OJ_`t~-b zr+FLrgn%ZoCz`5=wZ7Dh z!{GZTkn7HdJ6}rPyB>~fH^Yfi*mTqf3y{DOl)cPnTQx@L~A! 
z$8&7X^i|?mt%u5@C7Ot?c!LIj*N#R#>EpV7ztM zN^QRJT8x^((U#k7xvwpcwdFZ&d2WCMY0v1u=*Z|49QA_R32f^>qdwOS*nT74C-8pF zc;Du{e=GK*4f_*V`=TTJ*@gY>&Zq6gXXwl43amvrgwHyh&pn#&U;^LA)ZlM9mv1ug zD=p=lUkQu17KCIYdDb@a{#|5Waj<%a!Ap*Vn4E@Dzd#*u6;0|boNo8w*q@NWy(E)+ zhcf+Nkn5jR21z*IWMg>H{U(A`hN0$6fs#Ev*`;k(lHxeuNE2a|h_q%+F|x&cooC!G)5w2T^JEgJeQ_^fuZ#>syCa>vlt zp9Z_Q2yS}aWHjHUo_K7ECf5!0e#vS6YAS>eqBJ!|WwU0gO@7=29HuoIlFsDLy;159 z26Gt&mNF5BYX({L0m?z2FJD z!3%c6FV_yeOiR2RvH0d1SrW{3EpO=i{G5KCkIXTayXIn+>-an_^0`i%Ggyw%+cVDW zu6nHkh~qrtxh@LP1D zhH1|4Q-|86JatPU^LtYke#&b2brCP=>J%h?6aD4Ovm1+gATpK(p zhQ8tXIGWN?k%V$fHKsQGFq%y-sa$WO>pjWe9URfiInJjNS%z>{bmpvXM5R!Xx}ZRi ziA)pB@13Y;KTtW}BjXO>=DUgAE67KupmQBaoNo(CS&NtCkF0_Au@D(xDlkzm`ou4+BXA3F?^V&;rfh#D)_R||{HTdc@ z=EOnFkL{Q%YoV<#4z|Q9AL!6ll7meIgXjgO)EHk?3@Ai4_{k7dcu7GYa|3k$G2~A< z$(tha1?%9$-}I*B3+2cma_9%Z$JgTjnx!Yu@BS9}@=>tn4dBkRz*>gTzp_33JZphl z7X`b?pxx1|T0Fg-cfda`)o#M+9jA}x7PS*RU?cd&O7Mn-(N1L6YQPzmQ2lClHBpUH zKdMIciJGKbQ$H&4XfXDvSCozFF=eT`Q<;fYW2`zyp$n(dLmjBl!4oY<3v?ZIl^C_6 zQb;YXD7!%v>L61s38i=DJrTW$xh|3{7d;Je^C~71L-I)_PClyamJcf1(kFrhPr|gjT^V&mf<5+N8$K|uiX|{bqzNTD}Z!0(D2g-fXlWLyhm>{d1!cE-Upawx<%;@8d8B%k1T_r3N@nybh0*L(hL>%s=GD5XW#D&f zX*1MT+A4aZ?xDZwS$btZpvUUJ^xl@$(^{Bz7cJ0xu(>4gHw$Yaq+tz&e0UWr(95nd zd`MT;IvB2h;ZU$$&c=-G9K_i&exm1OXFdL7PC&x4yuq-{C6MC)W57? 
zqtLx5!vBFjP%1cVG#Ohy^0%r~QEky5523=EMRvB4Gwe9$*d6k-kDPNd8t;GxJSQ1g zp!Z87TpZnDUq@3(E}|ygL9TV03i=_L)n_Vm6~AnlF@oM1S;?1*GapxHevf6Y>_#jY zMwT%psN)Z4`!AqYcmO65@B{>Ucm?_ihntd&nek&40@o-H3RZ_1t_2>3uFP(O@%N5r z9-B)PS_u-i72G7i-^GKNUnXYVWk!1jXYigG?K{24l0aKj<}H)?x+#=-D~ec`*8CF8 z<~{S)7ciV3ree%pWz0@f1!7_q=CA7b!fOU&qyX+?Ba()IhS|Y^9B3lLa4QF5XaplN zIAXbO%KuHczY)&~Jio5lio!+!O*6t3_=vSC@8L0(L|<9N{5RM)GZ*il#r)Bf-uxD9 z?zzcfeq;*tVD>TtyTOhA5PiR}df7WPdC!Q+_t5nOw0URv^hd#s_R>3TbI_;Qmzlc@ z^LlGM(2e0zYVutL{2;~fe&j(Zk`b&Zif_?M)YkE8B=J3erLXI2u%QR!8CS?T042ba*TD(SqWwRDzJE6r#3mF1D?khufDz1~-k886Bhlao zj_3Y7+6%3HSD4oJ%o8oqXf*{rXn@i%u#QYMaNtTD|7CeaDQ1$QsQL?n59ebp$xS7b zgYzRBimohdArt&;Mo@-~=;<>sGI2X_UuH&D9?#D6a^e8U4fc=^L?W=AqU=RUboymr z zFT-(omJ8HQxA`8P(`)1t{eJ%9yU@@_SixFS;`GSIoLLA~uRMHB9nRI3)Jol%Q-^ZC zP68cTNM*Ev-(){n*SR1jmOwr97pBik736@=P9Lm!O5l#E&hH&d1{_cv|HEY(N5l#; z+MS%iC+Pum3l->V96W!>QH`cQ##CUCdEuqXlbJRFtLzSPJqm}@T(ZzjoU2F4DX!BI z_7%tUAG8)m^BsBzzBT3L94wC(wUIfB93>msOfk^hsvyBl!Ed{g7Y;?oI+c898EcAc zBM&-8&UXzD>@#z9%NKJCv;e)xyGG-8olBpEb*#s#^JBJ?QmQYf7hJN9r zP)GQKQbR$xU=h0u;o<-^5F^lAOc3&kGlZhze4&iERH!JfMnADpsEanDvA9cUA?_2} ziU)*FXePRg$AsSE386n4ib3LOVW@ak_(!}Tj1Vshqr^+X81b?&mNAYo{=Xw|dn}KQ z4nFT3&p#y$32x)4&`UfhbU}yFR@@=PikpQ7;#%|@%Y+KzJaivZ(QAwma*9LHh4dDp z#14X0j1{~n?VKp#zVW%<3i;8LWI?N(QaD9l`#AdOZxmdX#lm;X6yc5KAK|{Gd+>WY zf=*={+T^9Ig))_1#lz4gceQ-BG~qk0WI2cKWe@t7739TJ$#Vyi)3zp$t%f$EfMq9I zxMi#pFo}GA0GNJj*0QQj{$GgSISt6Xi7McisUkfjbC}PAqwnJQScDR1G*Pe%QN1p> zcv0BUwA?aK;e0bjneM|aA7|EBPxUmF+NuvnXJcy4Qq+AJ;9ZPFrmvvIcfo)I$m|+W zf=Qs8J@8D`=S(R^zmn9PI~r(X0@>sh5XD_Emh*7S3?VMGD?qRIR9a2dpcPhsscF>b7uE>BiQ z$U~Lha(AVz+*)ZMH&80eRh8m$X(hK@KuIrWQzGOvibW1n6v?9el{EQZ$tk~*e##G| zf8`s}JNbh2Ts|p1ln+U__-`U3@<$`JQg@eZ7=lDL>@ziZ07axMEYY zDrw2D^Qtw;wOgU1?o0kX0qyh>a`K(ZeDySW`UB;#`kCBaRbHW${-fqnOhyy4ofe7=P~rgpRYYtH_;FOm?m&6WTtK?gC@E$I+tE(9Vcj|;GOHk-FAVK z9-+UZce00m$l>}4j<37H{v7S#Wru@RE(G`8tyiFiX$B_Q$LrB2bBwO?=0z)175`5g z);=4GMsqIulx;yhbPPVHMre$> zRRLdb!5rC-xo-+{?J8!`{hU{q(UHCc%m2eIGrspU==uVl!Yc3su|a;d7FlJxVDG|> 
zFcSf9VPASzwEbl7CdCqHG3M)1Au*khK76wuWf1mzn7d`OIDT zt~V$n0)Ae_XbSpwQ-P9X3$AWgiCL;4HElcc$v(uG5%6z;b!C@;h5Scw4eP?RHbGw!=y%_N zI=maWM{nk|0YR>@9vyHRQI|KPdT)thtt|{{dtzY+*vyXLBc0$VyHLM(h5778wci6K zv?rKJFHn%c=*`?17`=kmYq|YDV>Pz}&s)Z8mhjpD*SUag&kgF~TJk>4dB3K-ZzI^z z`uKZlGv8K6Cs&DmD+h{Dns+MBD~s^Tg2Dd8fj-5pzz~{&rbY618N&hBG|4Ig%Kql~#9Q`S}m-}R~H^H2) zz=EE~ofOXslZS)(?L_kXF+B1Qj|_kx?M3#`g>1hqIYlhBKm&fanq(FMk8~OG-J;|i z`Kb_cf`Mfw?@b2+mI6O>IGJz=x)}@EumSC-j&fcyrp3z=(D;ReUxb)GgR6Zo8cc8C zPG5kvJtaSWNM3W7uF^NimakBmTtH`YmRjW$O45_$J%PFBC}!Po4y+Jn0``sB^pbZ+{Uu_x+_tK)i9Zvb8S=> z4(hNd&X?5G7wP#mGV`nCpaRJQN>qTltSA^!NjU5n@WqOBXR3ylsW!h~19HEnRAnux zS=yp@>V)2@8;D{r6j1%BX$F~Yp=Y?q8TgRj`zc#`!EFDAiY9?y{4-npPJQ=_t^VaG zNM_4!YCV~wLL-;-Q5PA^>p0FffMIQ9Z06kD!r1!1L*RcgI5u;=iLsGdYdw#z<@u}0 zLsy~ET0!l#jCye?`rk!hVe_dU=Wxu<;Qgnt9~01@jX_&6oMU$o_-J2_-5wmfornx= zIjWoUxf=2L>X6-5qhhPTo|R^Ai*Q8e_0}#bla)mB8C-_L@ARMoYC#=SliH>fby80J#VNquOvL{r zoRtZnZ4XeaUVtGz$Q-c=?dl@3rHS+;8Q?1hbDD`;Hhcjt`VxKcM&qG1;tlu`uQB( zMh{h({!)496O|1NCxhM(y;N5%l67W6^;op5_0dvQM^jZEZ$WYJo&wr0EeE=*%;>Mu zqGgR{ehx>U6@p$%M2BzEo@fT`5jy=xtl#rk(^$(*RiEN>dd~kZdF+j520IebaD}iP z2iuE8yOk2HSXvU)jOezqqT9;Fe&k12R!qyO$Dn7ef}XXmR-S#T0jAhc?@9hR1e|F+ zJ+$U%qxDs+KevOm5{}S+>!Nm$wGhs;e#1Ter}h@^^cQ)jre{FkUrKDC#^=I^a`9Z%&KboynzWQKK-O%}u@wW0UW$nzp z-s!$8tatOydj!w)P4LwO_;3lmGAlHp!x~L)u*}znUL-^Dh|gucn{8-V&rm-lono9K<)j%?lGR$;=GwPkOy2)$o2nsTa`CtiZ%6;4ltWffS)k-vCPAXL4ML=-s zQjK&)?>Uw^a5;1Fe)D!zMweK_@}1cO<{nNbq+G08SpgqZGrIHjVkMbz)H{pmtGB~) z9hLt(TtaRfLt%o48q-F_m=6C@0hI6+af>!)?Xj-H5#b+n@iT$MI0eUiPObQ;!-iMxLGVA9uO;tXT+M~O|hZ)RBR!B5Ic%L#U7$t z>?iugp`z6~QjD~Y6Vq8Ii&?GH#oX4JVnOR1v6yw9SjswIENfjLROC* z6`oVsI+gn-@z^-AoOOg)#yUhSY3(N#we}DTSUZY2tu4h&)<$A#YYj2nT3!^aMMX`_ zCH@uDil0Qg_)PQ)H^o1~Y5W9x@f)neA23&pqh4Jn^buzZt*MV|irx5pjfIS2c|j2K zP(w%IlhrI2@Ph3^_rH*Jc80U=O*>({rK-?__0j6HzGhLp5-BZ@EVB6+{LM=Eol%y} za5q!oc6zX;Xe~I9+|-g5ysaNhGpHN8G7pqDkLDL`$y`x_l^Vi{!#|8;O;?Purj5n| zrqSeqv9M}|nJEKD%?Cq!W4xibaWN{`J}8E&!9%16ol2sE`At?M-bh#Gf9S{*=+uyz 
zsOO{$!gW}Mbtnx7qdcsR<}eM%_fK5w7wEdS9QM91k*X5hK{V&d7b?AY)THzExm0{@ zS#dod3J96={knEYTcJ%ym)V9GQV1WCQM-?4a=p4&?XON&%d0I_yIMeftr+lIK2^pl zdzE_1WF@`Qnp&d>7`&*=m*2{r<>PX3dA=;jJ>}<8WooOm@&qYaY9`&4a!b1;%`;Vc z;OQi7_mr2$deTVEJWfwO&wY>46X$v9p6fZ}?&F#3uIuUU&g-e{4)LUS|8ToqcioR% z2i^N!%iQx^Bi;R6?cDWTmE8GVIozSH5cdz~U)OEtGgqARoNJMDr|Tc*B3E1ISXUKi zPgh=NGgqXuvP(@a)e_A*SR$LgL6vq6X($68_urDXPvRhN1Qd1 z_c%)?Z*k^MUhPbqyx8eTp5-(qPj*U4W1WAJhC4qe4RO9r8sL1M)X(`isgLsk<6cr< z=iQ|Kj6vKU#(krlPm;zvUnWg+zE7Ix{F=1f`8R2^Q%;I=nvzdB!+5{+$$GPq%FJr&sTzc|#S1RWRR}rV~s^?7S z?&U1yp5}~oZ*dNBUvw^Ve{ddfn_Umwxm-!^I<6F+0j|=XrLMM~c-I8a2iG=Fi2IhO zxI5X?!JR>x>8>FiaSxQ-GL8FeorHtI{nkSK4&f~b^{ zyQA_&UX7|0`97*iq#V^XG9r3dWbWvxk>#V8MmCJz9N95CE^=V>$;k217bE9J-;7)p z{UCB{^s~sg=vR@)qu)oKjs6gMDf(mN_2^HLH@Uve|F@&xaQiv;J>;>QJm*5>iRj~z z2cq{z?ugzPxh{HfXEghi$s=;&JdY7T8Okp|B3h; z^(^9P)R~CWQJW$*Moo>F64fK3M^vqdT2Z+p@l5!W!FCg{8N@ar_&)#j!uMr(lV7nmL;^S?Nvxl+p>_))S{Pm$F2OR~jl4l~AP?7?MMoDL;{G%j;w-as0Ye0$j@=*CU39OKYXao<7oQ zPmI*rW0UfFUV0MUdpsxI<2_T{O*{?UIX!9Js{6C+q5FtymwT#fqC3{r+MUx?%p5q*vJ<1d$+cb2lMA?xBuBUwCA*vhlV3R-C0}$FCKg8~FLk<- zCOTgbZ_g&Rc5Y6p<(!pN(m5z8hqF~u3TM?Mi?dLYCpkmXkL1v#_sMGF)8t=?cajql zuO&ZCyqJ7D@l5ii#Q5Y>iN}+VCLT-P&-LEK6Un=|Z&%`3#>M14iPw|k67MA+N_?Jt zA~7NPY~s)4tBIcEdx=KptHf~Um&8m?S7Je@C8?q_Wl|Go?xgO{n55Cp21yH?os+gZ zMNX%jg%>{<>G)qT~C( zn-S?ftkv@V)y9#RA7-7gZ`3xKeINB$Ul#8yUwiKv`ne{eADzFwOYHa1_4o6Rb`O(?nx6(+T4c5Rgr9 zgZHRlHLBjc<}0QaAX<~av-W}gJVSlsM^m30wMs)egAOJ4U4h2s6sxblB{wq&e%$i~ zg_^iLd*b|@34gGY-0r$y5Wm2Q2x1d42f1ujaV`1kIdP2mUR*93tOvv_)*E6)>wB@C z)gz9y+O12iS*-i5#jQ83)tLvHTUBd!YqaejYhK$_YbD!aYpiX(wYP1zb)4;}b%E`y zb))UNb-(R_^{nlM^^PsU`qK8r`q}o=`q!3dm2FO|-{!IkA#Q6ZBRs@qjSg{IQ-vg2 zQ-}PrrVaUOO&9Wk>(|yaAy2F+L+)53LoQn#A@RhT16D)GHmhP=WlglrvwpEnw7#|t zvEH|JwO+P0vmUoqweGYPwXU*dvd*-HT1VJCVmI3dv5D=DSl)I_%w^jkhTCR{9&3N` zowccW#ade2ZOtIgxB7*_)=xsL^_o!9x=ToHoh3+cGS9_2!f`RLutF4t;o>JtbCGo= z#SNBlah&Cc(8h92C}r6qL|G;Xzs;?Mo95!ec5|pO+5ExM&V0gB%Dli5Zth|EYN}v4 zZ;G<4GJP=*Hk~rpH7zt}Gj%h|#?t2d#t`#%<7?AcW1Oj(ae^r?G0$hnYIf 
z1LdZ2SlOy9Q^qMHlosT}MU?Wy(JW{~%*ti?yS!PxCr^@(k{hp;tIAX5>~eqEBDavg zk{{oe3dje@k(Wt=JVtU#U8E0EUFm^TOu8VYmkvs1X^ZsRvrKyKnJHcLjFI9z1Ef`+ zuF`Z*OKGU50Wr9m)Rd7Nz_hgY$dD2NXPfE$_j*ybwcIms@CMCE<>4jU6 z9x`q-uDPw!C3lE)j>pgNywmO!(kXX3Dc+rh>s;Jdh{sAvm)#Ym>+agp9d{Gyk-MGr z+}(q14VAvSCrN+Z^Ci{2P7*wOq$tlRDYNI6RKW8_it+rFYI=-vb5AO{yC<(a!c#$> z?P(&f_wMLTIU;YAbWY_8{MYTanOO0PbJFRTd zJ}EafyZTovrKZt4sg?CPYA^k$x>$dwp3-gFzj_HR%-c~b?VYQ2_Qq>-ykE3q-jsTR zx4Q1|4b@|OTlDU}hx#I)kr-FZ``y>Yo7%tFTit)rJIL?!uJh;i-SoHidHi%fHJtV} zHT?EXG-UC|8JhWD8>ahh#&~}z<1c?VV-~|AV>81!<8;F><0(T1({DpvQ#Ru$Qw!q` z(=6i?($Z;MQ-2 zOMMQjji0qFqEpiux{Q3KPe}@KIu7}hu(02S#bO5Wlvt0dc#LSb?iNc}pNVa)f^{-c zZHKk9^|p1M^{@4$HHGb8YiV1gt&OdMZJe#QZG&x@?V|0x?UU`hO$dn!$rDnRe79Rj zuaJ2m(?gDhYz=u6axuglnh=sN)ECkyG-K$n(6XT`LtBKN2^}1o5IQ&1WZxE=!+s{T zhW%-15Bu-X>2`~KyFG*bs=cWFlf9PR;An5p=ooA-?U-h7;#gtt^Nax<+x_w z?|5Rr?D%MZ=18>vAZOMcHb+=kv?EhkdPkwK9OTaV9Sy>YI@*Sna`X%<>lhYR$uTjk znqy8_O~;b3I*v7A^&FeS8aQ?`_Aw5G)pZ==u_Iws9Y@2;GfFWEGjcl)hh=u`4@>3P z6XtMi3o|)3gh}>QVL$DQ!`|6vg*~)S47+3>9(L5;H*CATW7u+g)39mw>S4p|CBr(~ zbA&apM~0QLYmRL8uZ~drBS&)RS;vde9gcIM3mn@*hdbtkws#B+t>$PJn#WNx)Zs`S znrN3op4eZ89JikiS!G`vGTJ^aq>a5@NQ}K)NNRhU5ND{%b}#gyZFgv#ZEEN&TL<#a zGNI*dk)bJV-_twlQKwTVpZO zmP7nt^$16-w}ffd^+I#&U?GRKx{xHMA~*eNSsr!H#O?weOC9uWzM) z9F<-}R+GuX>M@Dlv#cO8ixmQ5eSL7EQ~=eB@E)b}%0w!~`nW;TdOPEKC`GpwyFLje zVkenyNwDy1Yw zd!>J*FH&>qzEo0*mr_aFB-yh_dg~cSZ0;@X^t6!X6OTuDibOM?!NE2;l4)BeBQI$eZsTWebBSOy_fuXyJv)Zlc%qHou{*VwWpPP zrKhobxu=eMsi&%YiKjf*rTM=&_ZN1r@f37#@Dy-wW9;_icOPIJboV_e%=1ag;*q6do-nzFCx_hHQ&#TpX)I47mMY=|r0QB@HIx32T1j86cGAzP z6ZH@3MqL0;&aWk~UcJftkCw~3hP?HH*4z74o9zwP_j*g|R7iTFcZ#0Ew^J|WyQjDI zx%JV$jNUcA>csdy-Z#EQUN6~0X8$X1WxwCs*`LEV$zR*I$=}a+0bJs}|A7X`1ygg_zoEvG;28B_SUQ_-8HdCE&4C%*2&;7z)bKjGxmREtzhHcP z;KUK;I8#>hWm8e}3lxICO^r|oc0je#7j|?M-RY;JxL%C2bOT<}JvdWO&>iM7E1%qF zy^~kwr>qk5#ry~Tx`cM!4~lH1%YK94Sx)@?6=w6-m=2n+%nhP zz%mhy)j#xh?2RV91C>BC^yszd?_bU=S_-1c&&(P)QS|k<@L4^kYpmCD0*&Qv^DWaF z^C=VEu1w?L*anzqnL3$Anwpq8H}~TdCQ;)%7a5R73u;r9-$=vI#y$yWw_-ZW;pKe 
zWY|W|yVPIGFa-zrV7eD~LLt-;#bHIdX%s|dm=O(w1HZWLn@RVgVRXUmMAzH;{i#bP$;j_ceiztAlWeb*5`p6C7hzyhXX>_GSfb zj>b`MMyc+?!}o=L`Y++aZWG1N;bS|jKf<+t3rE~VeKDByEbf`0@5Rfuo&E){`bqBr{R(S-KV&_C z_W?8r|E1+kF5pX?j&U*TKKtiFvN2PeU9Z~K3O z-x}f2(x9y=h_7f$Iguc$IjW2 z)i?xBZH}RyajT(|@r>ag<5R;N;~&FDn3ChDx$ofT{)iq_!SfVhnrX~s+F~rvS=tml zxHqce2`IK#puX6LBI6P~+e?%ie__!rpl<15XNut%s*OviJzU#hI*?9BL$VgN%V9Jq zH_)lP4R(~hVzyb{q3)D$@kG+)DewR5>ny;dM&GqRn#_~}3yZ_z?(Xi3`{G`txD|>+ zacGM}p;VwiDO#Y&QmnW;6j^+6x7Z{(zZrJF|M|Y_e1~2y$t0P~OlI;vcHj3yr;_^Q z9lD|ZG?tk!3^l0DAUltdeYlHC)Mtl^W>k7q0t+~+q6^y^rN04auTCN3yqt{0HvUfm z&6(;EQ$PnRm++2yluS!ijl`<1x;($SSjtsFEaWOF z<|b#F#g#)$=gJ@=(#7km!dLO5@J4(tJmNFl6we9g_)N*W_o zCk_z$i+>0m#HK<6u?ls>!a{B_qu@^-LlxdTUvM37pfh$9{OL9@r>ljz&RN1(Fs;3u zU4&-TGAlbv33;7a$;+DHYF|44a$Iqqb|g7>I3lU_g^;Hm>>T81>1^#N=d6e>WG?We zcEOi7;?WMpx6h1IBN#4zZ7hwUqDZPGds~?wz4IeyDA3c^=wdVfuJHtjI%c(1NPX6Y!xYP`ZnQfiWA#?Et!TG=ULb%rHUKJ!gKufI`TvCi+8e#q_S#u^ zEeF)<;0#NnSDRm3q-NEol7Sha+ON3U>_&H11z8G!UaE=ePogC0<>lY**KTtBiru6Q?Ar@yan?^AxXrp7(GKzE7%pMtP)OR$iz#l@ICz<%jxQ zG0?3Qw9iU)>r##E!Ck~XEjLct4>Gj z5tN0x0xigBZ81EcNOcd_@rbrZJqJJN7F@2U;5OfDk5!rLZKpn$&SR?CJ$~RfGHazg z`Ow@c1>UzRnmY}_ptQywcE$DuVDCn0lfZS(2FDSGcG@cJ-g<234!+}q=$M`0`@ZaX zLcaI|c@;(bgiTiAxI6XVz<3t-WY;Tu3ShfR=`GQ1>_VT-08caewzbzMV#{X3D_RV` zW34Bc-0}=P5qp;GS%yZ>I&{^!SZ+^rs>+0xxDFwr(~aO}JhiE?om7hF)U{M45Zg&_7K7(KC7=Hf}U} zP7|wMcJY_E@UvTa5g0(^1hks=`8V0Oqcdmj6j=VA{q^| z>4O?>xXcK&c;o0fumfHb9ow;omC%Z=4HmVrdEaPhJ~uj0z3GOIdtZ914n}Ko6gm*W zV7q6))0jtfCmgKzYV?3&L414ldv~KCk<1aN(U`ajuf*Fs;wgLI!om1PUuPX3P@vy( zI?HWlgeUY-eoHT>AM}H@z*J8IPbw=&m3%N5OTcZZ0tcfZT$i@KejVOk+)Gi{SO*e4 z9vn*|3LS@F#+*X+<1!faR8&Wvf>?jY+IRF#444@rh^`EHxw+tG6o!-T#lBVp^;I7p zx|e_635G^*I2wU4s>i~yng;uLJ}ix8pa<814vYsgmI%N25ElFtzVa33zq{ZXpVFV@ zEnJ|l)-+(zvVuv=4_=})9d5k(GL6CXwFS@D4QoCCOxq}U9FwRL%mRZM3T|%&);$ue z-UhZL_&7;R;192~oo4H0P#0eQ@B`Rc&)^Tg1MT-69+m+jL$qHeqPhh}^uCY(R0IxI z8FX|iQFW*ZhO{AZL^Jrqt??2%fHCa~Z?!iuMgaKJK=2;J@f=3OB_5Bu+eEOclc_vU z1-Ci_g|}I-t!5LE%>{uw4_(~_jD?I)Ft_21#f&A4rM}USWkVh}V0(R*^;y>AxVqqF 
z>+qb~JijLV+3GM)s`|LglflIX^V(y14q;0+IfXWUIKGJ)7jy>4g4rFpu5QufyTABE?G~Y%caGBXr2ujZu2k2LY==KBH&gWn~?}B5! z#5bG*K6D>UuLK|N^Z>t$1hCSP{8GZ<hVgI%1L;NM)r)Bh)1nbd zF;!5`Du#mGZ`jlfXu&v9Ak@tC#O(syF5OI}*ULroHTTtHI8JxK8eWFEb&9&{Vfrd0 z!J6{&qGDlGMS#Isg1udcMtlgUoXK#w#?dQf1o!b^?q@F-tDBjPJ2^c{FfMe8t+1|C zqlWp7=A7~8k&G^ecX-#m;|TkXam;bw@ws?z9`ho;`xSbm+@M#=9iuF-=k4!V9d2@6 zaFUJC)M&-L)B#bVBKivkIs~GtGGAKM%VlJ!6 zZ>s@Zvu6C-+OnlH*wvosIP~L}7zkh7+f`sZkEVcc^=eGaM_(bF*=+?pw6#2nwT6MQ zUJ3?!CAjM~p#G!5KF5K{-oo#F2l(f`@Bj|t=_SKcJIZ}Xl!pk$ji)z*ZKdB zj|1st3futKeT#eL4)dc|jV+Z*X)4MKULDI+xRR+X@9_9G%UdjOvi}CN=ry1A_G8ZV zh;u#yvHg%&dC1-N7q9sM-1uGI!yVqmEwJM^n2oRV&aN^iU*=t2;Ju&Wjy%CB0jU-TA2XV3u@w;b~@#lgt~(U=JswaCHD|@?L&hyHK>+W{F_WN)VsR z*|P+OaTvd^MeJF?p8045%w_K^_J%Oy%;bm}K4#-WRII#X0b?$|+gT`D%>?26Q`st* zwefJjg7`m(zj2JQj4`Z@W^J@@IgZCctdD0ofi2$S|2=vPTSv2H6w8tSj1eq{bHq@N z9l{9YOh3ms?EE-1YR34+&-HO&t;Z2vjAe}REdyBg_l>di${vfZjaS=xG~H0gvfaCF zG>=EYAVcB|n zX`Nse`x9>KZn(Hxn6F|%jzM(CsW?nAEJe(gNFAI5(G-TPV<`SPD_!bSL$7nm;gqL>)#jQV4Mc#!T*G4pw*T7c_ zLq~KbmFn^MnL|Ko_BQ^4D}RL^m?!j_=!up^OR12tmOM=;(b+Vx&0hXf06M*YkcDeT zf1{f8J}M2TFE2eKensLIzc8=!^Gf)_f1uI4^+Q9JPGV0i36X(k6X~MZ%fJ<+y<@NkQY|~RK?&*)F=wPt)!>RO- z0iQk*e%f^UD$fO@9!8wAQtL;KFOYb3G@V#6n z#(C(O%XhT`-asr>u3dbONA)sjYt`oaZACoO1KsRlx&SURlMx2zEt=oNE;JR6)934k z-h()02t0*gVwAbWCo75LH^PD2OJ{?V^c=fMmiHOja9`;*Wj8kykL>4ndKO>fu2CO5 z-xmAci{JEc_~Mh%!(E6jZUp|zCfMRhcrPcg*VpiM9>W0tfHn6JdH4}qWJB+yFunE4 z(<`|)eHoe&Q*`1!?#rX$sA5fIKA6MZzm!=a3e55rIQB^}ypOSV9)|Y~qKb$7|C%h{ zS9~`EE{L~xaz?ZkbMjXZB}T8VRTV5l9eh5onpGRDM`ySsJ+UeQ=s6AXDOUw!QKsWB z&gK75RHBw)eIr?#)R2E3%( z*yg)f<$M1O@A`kneynT~wsH@)au*hM2exu65z7`V>qac=dTeDhmNk;|ug0pb;I)>* z#tP@`3wg#otm$kl>2$nBy9DcSmvWx=o7rVv#4lZ;=NyobD4@d+9Q57FHr{g zU~yArNry*}8P6aOQ9)7o)n&OdRe4kwjkMqKAKG%Qy5Ljv<@ycfS_Yw)HJz*P?Gm*D z4ze0t)0Y3$tY!;{E{ZLnj}dhWc4HM_zim zl%(=jm3(v~dSZ7VGu@k7-B9$N#>07^;aE&|I*JNg0vYQ4ROC_|hhc=Ab=)Uc{StP_ zSGsuVC=s~mdGo7NgmwQ5?E7r6@bkmQFX?Q;T5D$wdf(KCDbk#d5AEnG*2Ou|*^>@t z{XtO=afUiakd}|H 
z0{Fv%bd}3Yl~bg$DcjxTixt$uUZWg#huZ5Y`zAE7mZMJ`LQFpzOnM)9t!?<$>+s!| zM(H>QELH(Um#_Rv9;0t|g?z;!)F|T7x>`Y;J%eA^Fc^xRVe>UWslGIMk?gP^?Y1A( zkRLNcUBFx4N4ycs46~3~aU9-uPyU*44|vsXv*T0Sh;rVN!@0>Fl+2wT$6X!D9T~)& z-GlkN5wTWje6TFU#2%l2WhJNd7C-6+dWna~J;mV-g`w_0p7@r!IMHreqTCpUI}p}zWN z^a+;emFUKpn@*A<%G;kj^{88wLbEap{BR5Y%R8!l-kv)t=%Q~Y%e)#6U5Lk@PTfDW zuAZk_6Kr)QYHj(*O{PQhT%(u6E6^b~wYJ)E5F$I^#jVq_(wV_d&iRuXh;~6&^3A`K zAFZmzs727X$_#hGMsJ)?uw5Rj4b*EO{Zhb4C#rt*M3c$+K3Ary*OX!EQKc7s&|0gJ z=sGQ=XW1kr553AV(A&(WHdDST)sz=XapkU(Q@N<5Q4Yh!->JNpW0gnpa`Tir{){xNr}3>9Wm^gMmEyQEJg1zTMXD%gld8!% zrP`dcft*ijDi@Yo$|ZO&Wuz{0Io?}kslQxX8Y(xE#)37UD7TSj$X%p`axZDQ93Vx? z!=yMlNZKw>m-eHva7Is9U$+Aq8lyck@TFt4?eIL$<5#|JCo~ zI<3d3yF8|nZZql=6AaXwP@f%)F5E)&Rd*V?ew}`B->K+iHAj+DT7_SC#F&LXYCQhk z1@6vw-Y&bAS!OxQcB)2qsJR+u6!BIH5!g$jM}KRqrJVJgc>94Sf z{6zKu*^2H#!64#1N#DgFiQ^g754{-@d#?4|G z*HN)080?|ouIIR1pc!+!_PZ*%uDF`JUbuR=OxFlk7WYh73HNeWUH1l8C-)xL5cf&f zH1{o6xciwa#{J2)+pW7!xc%HW-C5nw-38$Jlm_ou&7Hxop*yc%D|bo1&h9FHz1{Wv z2D+R3jc|AL3v&1Jo8%toH{CtlZ#I0O1@6gyq3-E^VeSyWaQ9rlCGPotOWg}uF5+=0 z+e7^pu|ALOA?%;Vag*Fr{l>W`_zicD@f+YC=GW65;MdXJ&98;KEw9_yue!U2Uukz4 zzr5}Oei_|a{Ggin{cy?d7hnVLz)v~tI`2+$9pE!3yS3hhnPwDTlnrgMQ^4!P^Tg24D}HvJL?ENojHZt^fCR- zc>^1}6Wcn^nT)Q{8c?ef99f;6sFPQ4oTH+?!SMmS&jomto9yM`Ql@kCLszLD8cVs+ zj5KY*Xrr~UAG8&*uRyr{c8R$Iy z#<*{sHV&h)69sZ+3hKo@&?~MDS|&erAve9mKI!+-#7?1i@m6%OSL!iv#KZIf*svBb z#mmDK&rYW!yIxNJK!1-rU^)(azQf6RM(y+_w(OK=kS7VPy*N1G%jsQ#+8z~ElNj_h z_>-H=6-TjY+v#HzN!%O?*Lpg!?|4nshHIa+f!Ma*)K$B{({2m5y9vDRT5!56(4(?A zedY4Q=*&S+(Dd37G+O?I>wN$;dlH&GJGJj>0wdlxK2s-rr*5SlxQkwwdl~z9ykGmw z@jui*dCn1@ds;KqtBkv_LZ5KH*YL?d!vmMKVsOtZ!$Yq_EZ)ET{_nh)^zeRkAytQR z{VMU#CC-1wF^y+T;5A0k|89_D9q(;9jL=0`s9BC0yqnV4sl2@3U+E0(aRSuG(;DcR6h-4C6Z$|l9(}eJvpq&9#jBn2r}YPUjVELkE|76g!takIbFhet#W=Dk zJ;)3+Bq}atl`UD&-!O=+|0brtKop+n%U_QrC*7S~aU&G^%8;YYK~BeM@q?-LmAcIX z>NuyE9e0o$T}|C*CK=N}>R26^C2OIvQJ9%BJt`ZD@fwWD4fH&Z5Cd#MS!0DUnpqPp zw^7+>OCPbShCwa1~Z%>y!N^yA+yzpqW$#yHP^6HGpSdsqt<+oit{bJk*`#wGk^##V~w=5 
z0OL5=`T$;}YKbOhKS+-67K&nD$Xunh&9WA@MOtgx_E_84&cV2NXdP?&W}QPGU=>}N z;>kxHL=WkbZ7>;x=_m%RrfY8^2(A|{1)UoIF}YnRu-)LlwpL zHI7vrv4kTRVI}61!J3Vd`AlraRL3yZ29nq6%?#d|v$o?ETJlPb*;<#^s=+&{LI>mu zymlF|nI$=@C`T2bD}8P{b>>1PB?o)Gd73oDCRQ?NKgiX5z^1)mF8>QFcb)%Fq3gNN zevT_~m}}$B+ib-mZXmje0A&%5rJMyqG>9uV0Ijl)_^FM+d{ly+T8t~37mP>_G!$p{pg7LYld40dHWR=g)jik9Sh zYJ&PN0oEladQJZHDKtQse}NhP0+h^M;kbHa9+VCbqiqJ`A`D-!0{>A1x)w6BV+) zwB)fqwfx5KF_-l=NW{xnkkgi5tVf9%50ZOL!naAZnB;9Wvb!?*-0xVG&*b~xQx|=M z*Zqo`=qqZWZ>W;KBg^!GI_O8%-(z`RvwY>-zL0w$-138CCG_FY{Wl1fV$WPl)CPtBKC@$-Z#E?3yNTA(W}T|PqG!XA0wW==;L%$@!hX);dOkq zb-`=si+b}w`>*sO$W5$W6vR(OI)&8-Nz@hG+;DkG;?(24(!8G15mXc$d>+iRlRxClPqa(?)?Dn(Llyf?$ z;z8t12QVA-W4$+CU{5@_DF zm<<}UtsTdAWP1zdga+)Z%WKu-mCEx9#h42U;D_eoy=CUpq~|^Rag>!KBzq;eCZ+N1 z3OXL3!*Bz??gDY=ab~=IT$vqQp?KIa8}ND8`h2{bWaZDGP07ao49ZM&<$_`M zz>OxeyA>?eDt;}qVEc>(LluCp*8v7leXvtynCWx!D-&&pu>@Pl_pYP{6~Y}qimFsk zez`62{Ay7*Dv7Pg&TrVk+B>)e+(7;CDh)hQ)3@V1-=*Y za7!YWs&vuJkKdQh@)&f^RWiTH;Gj06Lc4;_IdjMXkEc2wfI4-1^1$`Y&Q!_kkp(V+ zrOO6V$b}ZscjF=eWg$4Jvb4^sk3e-UR(&1WF-BP zI?+$5HtdnYo>BD7MOBpA^HZ%1+V;8d3H_)~)1ArJpZ7X3^Kp7mCTjJy7_E%9M9Zno z)EwG4bW8_;J?^NTRO{1Uy{s0YdV8Oz2Xk!Ex{(WRran-s!YC}F9#k`_3Fz5IqR$@= zUU#N?UI_xrI~W{sH}JkK$Pw07Co1Le5ewrZ=EO(LfNvwHCGZh*C?DX3KT{NX2)vTd zD)8OtE3lnj1MBHSuu@qn&sS#26P0oDP<*J~_=z3KS2kCg$n}+4at)<|TnT@x4EfAr zN>;g`;*UnGMb1U8Gb{dK2K+OB<-X)ruHw_3kSxl6NtU-V-ITw-3T_M+0qvdA!pdH|h_f${GgVmdI z5E@1^)wk$j$a1Xer|ePl(08GVa#L*s9y<{KbB>Zx+lWTSY5dQ>mA;ylUKqL1y{$@4 zq%AzeLFx}}hWd*qQZ4E^sMhn`QM-A*szIK#6*Xv4?gCEW{U2gqBXc&4H|d*zzW=;cgH7u(6q)aPjNVjO^i(BFe>OFMms&m7!5aZiGBwR zz-Qx{p4R-X7dLZ&C$0^KxEK7yY48(c;3uXSkI}F*smtWHlrw8t+MC_cubXU%fKirg z9!BH#zU8YaqpFz6S{%FC+|txK)Y8iuY6&7Yv6zh6HuAvdu#3+@W|`Q*oVGDA+`{41 zY_SH~j$3EiQmw0)mA0Y4d5l`p-2dCw~K-gQ!=IqAD3o zHF7Fd#SmvaHN+$z2Pe{b$;ZRl>U=?^@;g;Zt8fJUyA#fg!X@;{Q_)d>;w*}8NEz7l zRpIE=6Fxed3E!OU1l8G{rjP>!K^Q^4dOW&wQ-ySJb}|d0K8?KmwH>yC4@Mk zw6IAiD{N*YFm^HavF(shl2Jl90N-a1kGJ!9vrw4h^PsVpO^6nL5!P`2rReX?zz%7b?D%&~B__bs0meLm-5YO1@?r;l^`lh1cji}g4bIX|J-{uCX(+wf{Gp`~}) 
z(b{>$(ZIRKQQhg)yI%v(CfrfNIS0j-sg7*UAV+%VaEIs|jEYNuL&k!C!i#@}1%Je^ z;TB%}d34>6^XoW(SF#h1YCOJutRpLtLwe!{k*L8!rbmT4`prHX8$ZPH(%#SU*xm)b zzEpQNarf`v8+X-y{0eGX^u>I@F>8v3RScZPzLZX2X^c5%BQ}Oq2f$qMJWxs;W zKZ|ug$}e~iy2bIR4@ctztVDe{6wLN4GHSu(dxnwk=?wmAw21GL^SOq{ciEN&Z!ZJ>fj@VPo284#PI5xtepVi$gdg~eAIRsuK%@8x?4d{0 zgdgHZ-iH%>2UXV_;0&%ZE;G)P|2u^jah#YUnOxEVvVeQQB5Y@Df=9fL+RZXNjQPYG z)A2thg4_;bZ5&ZY5V^$hcpc-pgC~HhpNRJDR36PF9+^$;c{X0jY<$_dJZ~O)xAT1? zob@GaTaG`wg5#I*jKw^AA(^Xrc))W&MuZTHPN(O>6k?i*V8h2VPmDu#cQk6r!$D9C zrJg;M4A)Tfe20J}AB;NhKydy8$Z5jKWxW=jPz^F))j2~=yy)7n_-fPpsRn1Mf`?QN zzo`Tsc0taPi!)@RKJL$6Cw`QLJvx56Zq38U0fr;9O(ur=MlAh--0DlKA6YavzPo6< zW3}6ET6Hi%5_z;Q_*U=1@w^}_` z0B3UzWXx$|pTopZNnlBLQlH;K%(MY4mA6j6l6-O)*vmPfC?{Kf$9t;Au~Gr&BKfi>0- zHOY2hz3b2uy#!eFY~aKl<}qseiN;f7J-NLlV9Tb_g>)ERU3Zu+&5fx>4cITGj6OzQ zGJTm)oph1y)2Z=)rU&G6^jL4`Zu&tgu!mpKCo+}#$XO~Q2Ve|uge|-TZrLR8ivz*I zb>CDb#Ftt2MP) z{Jn6s5c>An>1*W=m&#C^svqH(JyDCP*VSz5N!6w9Q-6@3dPWWSicc?p5?rpq@WlR5 zXQQV-R;jM`qt8=2{JjQ>zgmgjRmBvU+W!agf`8M?;J)&SI`cE~f_L%xuF#+A6#b_T z$Wip0T18K)CCXxX9`)#H-N_NQR~pE_E7jyW=-*T!KUhY|E*C`) zC$Hj`bI{`;Gd^GjG``X+GWomDjMt23j3@Y4PskHK#wUD0j_)SRi#$Hf`Z4l`$>abJ zk_$}2Puz)LwuP~gv7Ql2o-l^K1ksFj(l>bx$F1S`DE!Je@{RF4e+%bH;B~f3nUvk+ z43nhX_?(55gHj33T~;|x?(j6Z!wcjNuSt#3e`tw++Fp4ib>sc@Q(j0zc(0@IU4xYO z(j?yd3`O$!c8l@tR^#W!@R<|v?e-}x>A}@qPE`WsH_8OrK%XuHx)BA{jdDeGuiQXQ zk=v?QQUbN_3f(T$OhaAPVxA|`_8Xz^;F02YfD{g5ViejjRxPSsAgQ_uPh zep@9^0=2FadVkLY`nG-7BhVe%>nUJd^3*V%dpa1JX9#}aOrtcM>}L8l>i@@$S$e7w ziywGY_ctHu#n7l|YW`*nG^-l3&9=sRGtf9<&Nfo1XMN@VO>22*6tQSVJxf-zo27yY z@5>Ce(2EF7i;qAuJm3uX#4xZ7^Ta#iEb+ED zmFEPD*Tu2oWpM=O86=(r>u?lx(gX0ecY=M`B-Y?HOT+apATAfPfq+OWE*9+!MF@nB0>xY4CN2uC#1x^Pn1rVCCZUkH3LWM7f*=M9KZL=;3!#&cDm36a zl*cd4&lSo5+uU$Y1D7#SxaDkvKUq!KLjRt{&R>LJc+~yiu(xtP#vi=qEDWzblXJJz z;au;O9Lt<<9JA=xGsbz5y8j_~)mt4^oNFCOA&| z^iB$JT(h@v9OBop$)4A-lpO6eqJ^ROWSz;u)Fb~=oUCjnJT%kRA0At4dZ<;iCzC;81{Dtj!5^JcE&ZAm74nA5hn^)idnYFy_29Zq)RzDF-9|iVz0oHvS zY?!`S^tRxM>w_+?01h`lsN4)7k}V)_KY^$HiwxyC(5U;sqsGCFTnU18KKakd*z6Io 
zLcE>mx`L)_LtdpJmbt30`&CVPhm|AaUkLtoW>^h&ItzdC^|O0tUZ;=Naq7&wsI*1H zO9+EQHO1UOoozXMDsNA)Anfx%`hIn$M_zL>Ep@3cc)4RG;Em-q9~hb8iGlW^U-${w z-pPh;?584^NH%5*y_2Kir!J@Ny%0phEaN3M{E0EixJORrrZEIf{65BIdQe^>Gjkps z(Fypihd?>(F{+^uQo-12l;kW0=+B%TBt%AIo#9XSalu$dJueIl!~*&|&!&&~RO1Q! z*}M7};?ohvd3`8#zCoym1TataGyc^3&>OlJeZ{+j`|At`w}TNyojyiyL!G`AeTteJ z@p=n*xoyy$=xFSKo4bqScj|q4PCxh%eR*DQ=Dl92bI6>j4q7M zjIM0!&c5#Gzjx=j9&mp9qGvIf8qNrwKZ@5I%j-?xwWf0JnMR^Mi}yC$IH1op4uPvW zLcQmRzSKz8m-D_?G1jm>lChQ%!S)FDuLX-AX`Is6@mZtcZmg&0;b!BW4gyl&g970} zYMqBsQaNsX(a*s1xNKNZ=oaA&`-9C-Z@l7KzC(586aBEi;y+17AwvbnZx|)uk(2DuFBj@eT=*Jk$ z?`#D7f;j(FxXmG)ca~9xWjRI#Y))l!E?mO-C=V=zBNNV8%2+|2Y?aXfeUOH%H)2~u z_SH8R!HJ#^X3aYm7&XmMmP?Fk98=j`OJDC;&K1w|6FL5X(F{&wTkKg^I5&OaG7N;v zI20dsBs`q4aI43mIWmsg+c!b}655-as= z2i!%0+@_1lwhIoU3!hk|YaoGUxe%?_t1JM4*^MO}lMZo`xhW{DQjJb=E1Wz{!&dFZJZq|3RZx6@pWqmK0<=telcfu3h0e&Hou@4;1 zpJ>S(f}e5--dQqC_~Y==&Js6Wfva{0=E`Gc>erwjJ~L#{&nA6G1Zzi&pS3gbS$EcZ z!7A)Sy(@qk?NI7aV_;VVQ;V8Pc72v5H+i!H))jC_*ILTrD^+3CvPPlq6m4k&uClpx zJ$Rqkf5rxu@zkWe7@*yj*3{2BTK8MJ<6rf)9-%IkVhN;n6==Og|A^a`5m=hB)@NWc zUsz@^=D=)Rz%tbO#u9G*Y*_-sV>z{~H7sMP+ik)>^Nv_>MUjjMwntD$j<9+-#$$=H zDwg#ezmc(l?dz!bt;VW(N2JxX#PE0nBhI(9z-w`UOLDQ#P1Gw=Ww23YG&zUKYgn+E zHn!V|d7TWwcT6NP?m8GLQN+8^1OJT@kGAfv4yb}F6UM-Pur+d>;Rvh z$k+=;Wgnc4ebgnBtVM~hi^H8Q!M>8PXi9TjS)NmYbChS4<(y@F%PlAllMHG&o_8omJtxuE2L*hVQ?G^)YqX z`+P-z>9v|4D&LI zTF){VmvdlRPUOBAMaEz#_sT$!KfSomI?(^D36-FlKAzV{IN2|$3jM`=aD#c`Ec3|` z=9)b|4)=Rz%4f`^cjygz5hhs*oXUeR#&?(<=@8nKIk6^s7Uh|#io-R_556)xd;IZX zOy>V@%P&8qw9`x5qjv)P*N%KdYq$q3@Kc)Nr!+KD;nH13 z59E|y5&xtlcD*2WJ~#b-avJ;LUGCO%;D_YE=g30OxeSbSjI_pjMigTWV+A8z_c!LF zH!@SVqn%}fXY#;akWoeZj2_x6y}SOHT*rNQo2iUDtlwsN8+OBOy*(JH4)kX1#CkVy zq8?&)EWOWLx75KGS7Kfj!wme0P_zbC<8Q3Tu5X1myVq!s4eySAK|k{;-j`}jY{c=vgD?A_*F*EmKq9sg}O+3#T3edB*V#=HN&4?Fjd9h|*` zrFZ;%uJ@RqkN>mupn>2WI>Ydd|LpTV??1!)KE31LbN%}oHlAnWIPdVjrgwO^S^pXT z?)!P1cc1CoXW{sN2Pp*Bz0dLP`*#@sZ2Ql4?=e4z$@1UxP;N4Rj-T6p?)%R+?{ofr z^mEA*eE<0I`2K!ALac`4yyyS-@z1k*|3`Z6za9DW6}(6PyXJlF&wo}H|9*ylxBb7% 
YpU3>)TfN)<_kaJ>v;OyI{Qvj<7cm`$mH+?% literal 0 HcmV?d00001 From 23aeaa88b2153f26ec7f1b0bb58b4dd09f1b97b1 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Tue, 8 Dec 2015 20:52:24 +0100 Subject: [PATCH 26/57] Fixes random failures of org.apache.elasticsearch.test.rest.RestIT RestTable: ignores right padding for the last cell of a column. --- .../rest/action/support/RestTable.java | 21 +++++++++++++------ .../test/cat.aliases/10_basic.yaml | 2 +- .../test/cat.allocation/10_basic.yaml | 4 ++-- .../test/cat.count/10_basic.yaml | 5 ++--- .../test/cat.nodes/10_basic.yaml | 2 +- 5 files changed, 21 insertions(+), 13 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/rest/action/support/RestTable.java b/core/src/main/java/org/elasticsearch/rest/action/support/RestTable.java index 549624059fa..3808e58a527 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/support/RestTable.java +++ b/core/src/main/java/org/elasticsearch/rest/action/support/RestTable.java @@ -79,8 +79,9 @@ public class RestTable { if (verbose) { for (int col = 0; col < headers.size(); col++) { DisplayHeader header = headers.get(col); - pad(new Table.Cell(header.display, table.findHeaderByName(header.name)), width[col], request, out); - if (col != lastHeader) { + boolean isLastColumn = col == lastHeader; + pad(new Table.Cell(header.display, table.findHeaderByName(header.name)), width[col], request, out, isLastColumn); + if (!isLastColumn) { out.append(" "); } } @@ -89,8 +90,9 @@ public class RestTable { for (int row = 0; row < table.getRows().size(); row++) { for (int col = 0; col < headers.size(); col++) { DisplayHeader header = headers.get(col); - pad(table.getAsMap().get(header.name).get(row), width[col], request, out); - if (col != lastHeader) { + boolean isLastColumn = col == lastHeader; + pad(table.getAsMap().get(header.name).get(row), width[col], request, out, isLastColumn); + if (!isLastColumn) { out.append(" "); } } @@ -240,6 +242,10 @@ public class RestTable { } public static 
void pad(Table.Cell cell, int width, RestRequest request, UTF8StreamWriter out) throws IOException { + pad(cell, width, request, out, false); + } + + public static void pad(Table.Cell cell, int width, RestRequest request, UTF8StreamWriter out, boolean isLast) throws IOException { String sValue = renderValue(request, cell.value); int length = sValue == null ? 0 : sValue.length(); byte leftOver = (byte) (width - length); @@ -258,8 +264,11 @@ public class RestTable { if (sValue != null) { out.append(sValue); } - for (byte i = 0; i < leftOver; i++) { - out.append(" "); + // Ignores the leftover spaces if the cell is the last of the column. + if (!isLast) { + for (byte i = 0; i < leftOver; i++) { + out.append(" "); + } } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/10_basic.yaml index 73285eaa656..3ee33b0a67b 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/10_basic.yaml @@ -185,6 +185,6 @@ - match: $body: | /^ - index \s+ alias \s+ \n + index \s+ alias \n test \s+ test_1 \n $/ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml index c0a5a079e5d..3537da73c81 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml @@ -156,7 +156,7 @@ disk.percent \s+ host \s+ ip \s+ - node \s+ + node \n ( \s* #allow leading spaces to account for right-justified text @@ -199,7 +199,7 @@ $body: | /^ disk.percent \s+ - node \s+ + node \n ( \s+\d* \s+ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.count/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.count/10_basic.yaml index 
30199466616..87ca75a6092 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.count/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.count/10_basic.yaml @@ -19,7 +19,7 @@ - match: $body: | /# epoch timestamp count - ^ \d+ \s \d{2}:\d{2}:\d{2} \s 0 \n $/ + ^ \d+ \s \d{2}:\d{2}:\d{2} \s 0 \n$/ - do: index: @@ -72,5 +72,4 @@ - match: $body: | /^ epoch \s+ timestamp \s+ count \n - \d+ \s+ \d{2}:\d{2}:\d{2} \s+ \d+ \s+ \n $/ - + \d+ \s+ \d{2}:\d{2}:\d{2} \s+ \d+ \n $/ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yaml index 77aaecf51c2..2531e6ef025 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yaml @@ -15,7 +15,7 @@ - match: $body: | - /^ host \s+ ip \s+ heap\.percent \s+ ram\.percent \s+ cpu \s+ load \s+ node\.role \s+ master \s+ name \s+ \n + /^ host \s+ ip \s+ heap\.percent \s+ ram\.percent \s+ cpu \s+ load \s+ node\.role \s+ master \s+ name \n (\S+ \s+ (\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ \d* \s+ (-)?\d*(\.\d+)? \s+ [-dc] \s+ [-*mx] \s+ (\S+\s?)+ \n)+ $/ - do: From 2e299860e5ecc98c1f566d2f15625d26a0e0dd51 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Tue, 8 Dec 2015 22:45:21 -0500 Subject: [PATCH 27/57] refactor mustache to lang-mustache plugin. No rest tests yet. 
--- .../gradle/plugin/PluginBuildPlugin.groovy | 1 - buildSrc/version.properties | 1 - core/build.gradle | 3 -- .../index/query/TemplateQueryParser.java | 10 ----- .../RestRenderSearchTemplateAction.java | 3 +- .../RestDeleteSearchTemplateAction.java | 4 +- .../template/RestGetSearchTemplateAction.java | 4 +- .../template/RestPutSearchTemplateAction.java | 4 +- .../elasticsearch/script/ScriptModule.java | 9 ---- .../org/elasticsearch/script/Template.java | 16 ++++--- .../elasticsearch/index/IndexModuleTests.java | 2 - .../elasticsearch/script/FileScriptTests.java | 6 --- .../script/ScriptModesTests.java | 23 ++-------- .../script/ScriptServiceTests.java | 8 +--- modules/lang-mustache/build.gradle | 36 +++++++++++++++ .../mustache/JsonEscapingMustacheFactory.java | 0 .../script/mustache/MustachePlugin.java | 40 +++++++++++++++++ .../mustache/MustacheScriptEngineService.java | 0 .../query/TemplateQueryBuilderTests.java | 24 +++++++++- .../tests/RenderSearchTemplateTests.java | 15 +++++-- .../messy/tests/SuggestSearchTests.java | 17 +++++-- .../tests}/TemplateQueryParserTests.java | 7 ++- .../messy/tests/TemplateQueryTests.java | 16 ++++++- .../messy/tests/package-info.java | 44 +++++++++++++++++++ .../mustache/MustacheScriptEngineTests.java | 0 .../script/mustache/MustacheTests.java | 0 .../scripts/full-query-template.mustache | 0 .../config/scripts/storedTemplate.mustache | 0 settings.gradle | 1 + .../index/query/AbstractQueryTestCase.java | 8 ---- .../script/MockScriptEngine.java | 0 31 files changed, 209 insertions(+), 93 deletions(-) create mode 100644 modules/lang-mustache/build.gradle rename {core => modules/lang-mustache}/src/main/java/org/elasticsearch/script/mustache/JsonEscapingMustacheFactory.java (100%) create mode 100644 modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java rename {core => modules/lang-mustache}/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java (100%) rename {core => 
modules/lang-mustache}/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java (84%) rename core/src/test/java/org/elasticsearch/validate/RenderSearchTemplateIT.java => modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java (93%) rename core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java => modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java (99%) rename {core/src/test/java/org/elasticsearch/index/query => modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests}/TemplateQueryParserTests.java (97%) rename core/src/test/java/org/elasticsearch/index/query/TemplateQueryIT.java => modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java (97%) create mode 100644 modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/package-info.java rename {core => modules/lang-mustache}/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java (100%) rename {core => modules/lang-mustache}/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java (100%) rename {core/src/test/resources/org/elasticsearch/index/query => modules/lang-mustache/src/test/resources/org/elasticsearch/messy/tests}/config/scripts/full-query-template.mustache (100%) rename {core/src/test/resources/org/elasticsearch/index/query => modules/lang-mustache/src/test/resources/org/elasticsearch/messy/tests}/config/scripts/storedTemplate.mustache (100%) rename {core/src/test => test-framework/src/main}/java/org/elasticsearch/index/query/AbstractQueryTestCase.java (98%) rename {core/src/test => test-framework/src/main}/java/org/elasticsearch/script/MockScriptEngine.java (100%) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index eea2041052a..0d936ab0e15 100644 --- 
a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -65,7 +65,6 @@ public class PluginBuildPlugin extends BuildPlugin { // with a full elasticsearch server that includes optional deps provided "com.spatial4j:spatial4j:${project.versions.spatial4j}" provided "com.vividsolutions:jts:${project.versions.jts}" - provided "com.github.spullara.mustache.java:compiler:${project.versions.mustache}" provided "log4j:log4j:${project.versions.log4j}" provided "log4j:apache-log4j-extras:${project.versions.log4j}" provided "org.slf4j:slf4j-api:${project.versions.slf4j}" diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 1a982d30676..fc4ef40d6d5 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -4,7 +4,6 @@ lucene = 5.4.0-snapshot-1715952 # optional dependencies spatial4j = 0.5 jts = 1.13 -mustache = 0.9.1 jackson = 2.6.2 log4j = 1.2.17 slf4j = 1.6.2 diff --git a/core/build.gradle b/core/build.gradle index 3db5097ea7a..fd8a0c10f5a 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -74,9 +74,6 @@ dependencies { compile "com.spatial4j:spatial4j:${versions.spatial4j}", optional compile "com.vividsolutions:jts:${versions.jts}", optional - // templating - compile "com.github.spullara.mustache.java:compiler:${versions.mustache}", optional - // logging compile "log4j:log4j:${versions.log4j}", optional compile "log4j:apache-log4j-extras:${versions.log4j}", optional diff --git a/core/src/main/java/org/elasticsearch/index/query/TemplateQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/TemplateQueryParser.java index 3c72adfa0ac..7f64eb3ccf2 100644 --- a/core/src/main/java/org/elasticsearch/index/query/TemplateQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/TemplateQueryParser.java @@ -18,25 +18,15 @@ */ package org.elasticsearch.index.query; -import 
org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.HasContextAndHeaders; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.lease.Releasables; -import org.elasticsearch.common.xcontent.XContent; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.script.*; -import org.elasticsearch.script.mustache.MustacheScriptEngineService; -import org.elasticsearch.search.builder.SearchSourceBuilder; import java.io.IOException; import java.util.HashMap; import java.util.Map; -import static org.elasticsearch.common.Strings.hasLength; - /** * In the simplest case, parse template string and variables from the request, * compile the template and execute the template against the given variables. diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/template/RestRenderSearchTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/template/RestRenderSearchTemplateAction.java index a25754d8752..5ebec7130df 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/template/RestRenderSearchTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/validate/template/RestRenderSearchTemplateAction.java @@ -41,7 +41,6 @@ import org.elasticsearch.rest.action.support.RestBuilderListener; import org.elasticsearch.script.Script.ScriptField; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.Template; -import org.elasticsearch.script.mustache.MustacheScriptEngineService; import java.util.Map; @@ -89,7 +88,7 @@ public class RestRenderSearchTemplateAction extends BaseRestHandler { throw new ElasticsearchParseException("failed to parse request. 
unknown field [{}] of type [{}]", currentFieldName, token); } } - template = new Template(templateId, ScriptType.INDEXED, MustacheScriptEngineService.NAME, null, params); + template = new Template(templateId, ScriptType.INDEXED, Template.DEFAULT_LANG, null, params); } renderSearchTemplateRequest = new RenderSearchTemplateRequest(); renderSearchTemplateRequest.template(template); diff --git a/core/src/main/java/org/elasticsearch/rest/action/template/RestDeleteSearchTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/template/RestDeleteSearchTemplateAction.java index 9b205a8070f..3d0daf37b63 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/template/RestDeleteSearchTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/template/RestDeleteSearchTemplateAction.java @@ -24,7 +24,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.script.RestDeleteIndexedScriptAction; -import org.elasticsearch.script.mustache.MustacheScriptEngineService; +import org.elasticsearch.script.Template; import static org.elasticsearch.rest.RestRequest.Method.DELETE; @@ -38,6 +38,6 @@ public class RestDeleteSearchTemplateAction extends RestDeleteIndexedScriptActio @Override protected String getScriptLang(RestRequest request) { - return MustacheScriptEngineService.NAME; + return Template.DEFAULT_LANG; } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/rest/action/template/RestGetSearchTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/template/RestGetSearchTemplateAction.java index 39be6a53370..0e8aa357fcd 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/template/RestGetSearchTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/template/RestGetSearchTemplateAction.java @@ -25,7 +25,7 @@ import 
org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.script.RestGetIndexedScriptAction; -import org.elasticsearch.script.mustache.MustacheScriptEngineService; +import org.elasticsearch.script.Template; import static org.elasticsearch.rest.RestRequest.Method.GET; @@ -42,7 +42,7 @@ public class RestGetSearchTemplateAction extends RestGetIndexedScriptAction { @Override protected String getScriptLang(RestRequest request) { - return MustacheScriptEngineService.NAME; + return Template.DEFAULT_LANG; } @Override diff --git a/core/src/main/java/org/elasticsearch/rest/action/template/RestPutSearchTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/template/RestPutSearchTemplateAction.java index a734ce37ca2..0d23645afda 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/template/RestPutSearchTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/template/RestPutSearchTemplateAction.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.*; import org.elasticsearch.rest.action.script.RestPutIndexedScriptAction; -import org.elasticsearch.script.mustache.MustacheScriptEngineService; +import org.elasticsearch.script.Template; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestRequest.Method.PUT; @@ -59,6 +59,6 @@ public class RestPutSearchTemplateAction extends RestPutIndexedScriptAction { @Override protected String getScriptLang(RestRequest request) { - return MustacheScriptEngineService.NAME; + return Template.DEFAULT_LANG; } } diff --git a/core/src/main/java/org/elasticsearch/script/ScriptModule.java b/core/src/main/java/org/elasticsearch/script/ScriptModule.java index 3c19826a190..f3bdad64b66 100644 --- 
a/core/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -22,9 +22,7 @@ package org.elasticsearch.script; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.MapBinder; import org.elasticsearch.common.inject.multibindings.Multibinder; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.script.mustache.MustacheScriptEngineService; import java.util.ArrayList; import java.util.HashMap; @@ -75,13 +73,6 @@ public class ScriptModule extends AbstractModule { Multibinder multibinder = Multibinder.newSetBinder(binder(), ScriptEngineService.class); multibinder.addBinding().to(NativeScriptEngineService.class); - - try { - Class.forName("com.github.mustachejava.Mustache"); - multibinder.addBinding().to(MustacheScriptEngineService.class).asEagerSingleton(); - } catch (Throwable t) { - Loggers.getLogger(ScriptService.class, settings).debug("failed to load mustache", t); - } for (Class scriptEngine : scriptEngines) { multibinder.addBinding().to(scriptEngine).asEagerSingleton(); diff --git a/core/src/main/java/org/elasticsearch/script/Template.java b/core/src/main/java/org/elasticsearch/script/Template.java index 4419d6f5093..c9bb9085051 100644 --- a/core/src/main/java/org/elasticsearch/script/Template.java +++ b/core/src/main/java/org/elasticsearch/script/Template.java @@ -29,13 +29,15 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.mustache.MustacheScriptEngineService; import java.io.IOException; import java.util.Collections; import java.util.Map; public class Template extends Script { + + /** Default templating language */ + public static final String 
DEFAULT_LANG = "mustache"; private XContentType contentType; @@ -51,7 +53,7 @@ public class Template extends Script { * The inline template. */ public Template(String template) { - super(template, MustacheScriptEngineService.NAME); + super(template, DEFAULT_LANG); } /** @@ -73,7 +75,7 @@ public class Template extends Script { */ public Template(String template, ScriptType type, @Nullable String lang, @Nullable XContentType xContentType, @Nullable Map params) { - super(template, type, lang == null ? MustacheScriptEngineService.NAME : lang, params); + super(template, type, lang == null ? DEFAULT_LANG : lang, params); this.contentType = xContentType; } @@ -120,16 +122,16 @@ public class Template extends Script { } public static Script parse(Map config, boolean removeMatchedEntries, ParseFieldMatcher parseFieldMatcher) { - return new TemplateParser(Collections.emptyMap(), MustacheScriptEngineService.NAME).parse(config, removeMatchedEntries, parseFieldMatcher); + return new TemplateParser(Collections.emptyMap(), DEFAULT_LANG).parse(config, removeMatchedEntries, parseFieldMatcher); } public static Template parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException { - return new TemplateParser(Collections.emptyMap(), MustacheScriptEngineService.NAME).parse(parser, parseFieldMatcher); + return new TemplateParser(Collections.emptyMap(), DEFAULT_LANG).parse(parser, parseFieldMatcher); } @Deprecated public static Template parse(XContentParser parser, Map additionalTemplateFieldNames, ParseFieldMatcher parseFieldMatcher) throws IOException { - return new TemplateParser(additionalTemplateFieldNames, MustacheScriptEngineService.NAME).parse(parser, parseFieldMatcher); + return new TemplateParser(additionalTemplateFieldNames, DEFAULT_LANG).parse(parser, parseFieldMatcher); } @Deprecated @@ -172,7 +174,7 @@ public class Template extends Script { @Override protected Template createSimpleScript(XContentParser parser) throws IOException { - return new 
Template(String.valueOf(parser.objectText()), ScriptType.INLINE, MustacheScriptEngineService.NAME, contentType, null); + return new Template(String.valueOf(parser.objectText()), ScriptType.INLINE, DEFAULT_LANG, contentType, null); } @Override diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java index 2b76d03952d..c2306132930 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -59,7 +59,6 @@ import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.ScriptContextRegistry; import org.elasticsearch.script.ScriptEngineService; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.script.mustache.MustacheScriptEngineService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.engine.MockEngineFactory; @@ -102,7 +101,6 @@ public class IndexModuleTests extends ESTestCase { BigArrays bigArrays = new BigArrays(recycler, circuitBreakerService); IndicesFieldDataCache indicesFieldDataCache = new IndicesFieldDataCache(settings, new IndicesFieldDataCacheListener(circuitBreakerService), threadPool); Set scriptEngines = new HashSet<>(); - scriptEngines.add(new MustacheScriptEngineService(settings)); scriptEngines.addAll(Arrays.asList(scriptEngineServices)); ScriptService scriptService = new ScriptService(settings, environment, scriptEngines, new ResourceWatcherService(settings, threadPool), new ScriptContextRegistry(Collections.emptyList())); IndicesQueriesRegistry indicesQueriesRegistry = new IndicesQueriesRegistry(settings, Collections.emptySet(), new NamedWriteableRegistry()); diff --git a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java index daefc205933..fc888c79a8c 100644 --- 
a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java @@ -19,12 +19,9 @@ package org.elasticsearch.script; import org.elasticsearch.common.ContextAndHeaderHolder; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; -import org.elasticsearch.script.mustache.MustacheScriptEngineService; import org.elasticsearch.test.ESTestCase; -import org.junit.Test; import java.nio.file.Files; import java.nio.file.Path; @@ -32,9 +29,6 @@ import java.util.Collections; import java.util.HashSet; import java.util.Set; -import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.hamcrest.Matchers.containsString; - // TODO: these really should just be part of ScriptService tests, there is nothing special about them public class FileScriptTests extends ESTestCase { diff --git a/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java b/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java index 38ab78bff4c..3e476d2bebb 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.script; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.mustache.MustacheScriptEngineService; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.test.ESTestCase; import org.junit.After; @@ -45,7 +44,7 @@ import static org.hamcrest.Matchers.containsString; // TODO: this needs to be a base test class, and all scripting engines extend it public class ScriptModesTests extends ESTestCase { private static final Set ALL_LANGS = unmodifiableSet( - newHashSet(MustacheScriptEngineService.NAME, "custom", 
"test")); + newHashSet("custom", "test")); static final String[] ENABLE_VALUES = new String[]{"on", "true", "yes", "1"}; static final String[] DISABLE_VALUES = new String[]{"off", "false", "no", "0"}; @@ -73,7 +72,6 @@ public class ScriptModesTests extends ESTestCase { scriptContextRegistry = new ScriptContextRegistry(contexts.values()); scriptContexts = scriptContextRegistry.scriptContexts().toArray(new ScriptContext[scriptContextRegistry.scriptContexts().size()]); scriptEngines = buildScriptEnginesByLangMap(newHashSet( - new MustacheScriptEngineService(Settings.EMPTY), //add the native engine just to make sure it gets filtered out new NativeScriptEngineService(Settings.EMPTY, Collections.emptyMap()), new CustomScriptEngineService())); @@ -93,8 +91,8 @@ public class ScriptModesTests extends ESTestCase { public void assertAllSettingsWereChecked() { if (assertScriptModesNonNull) { assertThat(scriptModes, notNullValue()); - //3 is the number of engines (native excluded), custom is counted twice though as it's associated with two different names - int numberOfSettings = 3 * ScriptType.values().length * scriptContextRegistry.scriptContexts().size(); + //2 is the number of engines (native excluded), custom is counted twice though as it's associated with two different names + int numberOfSettings = 2 * ScriptType.values().length * scriptContextRegistry.scriptContexts().size(); assertThat(scriptModes.scriptModes.size(), equalTo(numberOfSettings)); if (assertAllSettingsWereChecked) { assertThat(checkedSettings.size(), equalTo(numberOfSettings)); @@ -190,21 +188,6 @@ public class ScriptModesTests extends ESTestCase { assertScriptModes(ScriptMode.SANDBOX, ALL_LANGS, new ScriptType[]{ScriptType.INLINE}, complementOf); } - public void testInteractionBetweenGenericAndEngineSpecificSettings() { - Settings.Builder builder = Settings.builder().put("script.inline", randomFrom(DISABLE_VALUES)) - .put(specificEngineOpSettings(MustacheScriptEngineService.NAME, ScriptType.INLINE, 
ScriptContext.Standard.AGGS), randomFrom(ENABLE_VALUES)) - .put(specificEngineOpSettings(MustacheScriptEngineService.NAME, ScriptType.INLINE, ScriptContext.Standard.SEARCH), randomFrom(ENABLE_VALUES)); - Set mustacheLangSet = singleton(MustacheScriptEngineService.NAME); - Set allButMustacheLangSet = new HashSet<>(ALL_LANGS); - allButMustacheLangSet.remove(MustacheScriptEngineService.NAME); - this.scriptModes = new ScriptModes(scriptEngines, scriptContextRegistry, builder.build()); - assertScriptModes(ScriptMode.ON, mustacheLangSet, new ScriptType[]{ScriptType.INLINE}, ScriptContext.Standard.AGGS, ScriptContext.Standard.SEARCH); - assertScriptModes(ScriptMode.OFF, mustacheLangSet, new ScriptType[]{ScriptType.INLINE}, complementOf(ScriptContext.Standard.AGGS, ScriptContext.Standard.SEARCH)); - assertScriptModesAllOps(ScriptMode.OFF, allButMustacheLangSet, ScriptType.INLINE); - assertScriptModesAllOps(ScriptMode.SANDBOX, ALL_LANGS, ScriptType.INDEXED); - assertScriptModesAllOps(ScriptMode.ON, ALL_LANGS, ScriptType.FILE); - } - private void assertScriptModesAllOps(ScriptMode expectedScriptMode, Set langs, ScriptType... 
scriptTypes) { assertScriptModes(expectedScriptMode, langs, scriptTypes, scriptContexts); } diff --git a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index aa7df3f3ebc..23cada02c6c 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.mustache.MustacheScriptEngineService; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.watcher.ResourceWatcherService; @@ -73,8 +72,7 @@ public class ScriptServiceTests extends ESTestCase { .put("path.conf", genericConfigFolder) .build(); resourceWatcherService = new ResourceWatcherService(baseSettings, null); - scriptEngineServices = newHashSet(new TestEngineService(), - new MustacheScriptEngineService(baseSettings)); + scriptEngineServices = newHashSet(new TestEngineService()); scriptEnginesByLangMap = ScriptModesTests.buildScriptEnginesByLangMap(scriptEngineServices); //randomly register custom script contexts int randomInt = randomIntBetween(0, 3); @@ -199,10 +197,6 @@ public class ScriptServiceTests extends ESTestCase { createFileScripts("groovy", "mustache", "test"); for (ScriptContext scriptContext : scriptContexts) { - //mustache engine is sandboxed, all scripts are enabled by default - assertCompileAccepted(MustacheScriptEngineService.NAME, "script", ScriptType.INLINE, scriptContext, contextAndHeaders); - assertCompileAccepted(MustacheScriptEngineService.NAME, "script", ScriptType.INDEXED, scriptContext, contextAndHeaders); - assertCompileAccepted(MustacheScriptEngineService.NAME, "file_script", ScriptType.FILE, 
scriptContext, contextAndHeaders); //custom engine is sandboxed, all scripts are enabled by default assertCompileAccepted("test", "script", ScriptType.INLINE, scriptContext, contextAndHeaders); assertCompileAccepted("test", "script", ScriptType.INDEXED, scriptContext, contextAndHeaders); diff --git a/modules/lang-mustache/build.gradle b/modules/lang-mustache/build.gradle new file mode 100644 index 00000000000..b2e11c1c299 --- /dev/null +++ b/modules/lang-mustache/build.gradle @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +esplugin { + description 'Mustache scripting integration for Elasticsearch' + classname 'org.elasticsearch.script.mustache.MustachePlugin' +} + +dependencies { + compile "com.github.spullara.mustache.java:compiler:0.9.1" +} + +//compileTestJava.options.compilerArgs << '-Xlint:-rawtypes' + +integTest { + cluster { + systemProperty 'es.script.inline', 'on' + systemProperty 'es.script.indexed', 'on' + } +} diff --git a/core/src/main/java/org/elasticsearch/script/mustache/JsonEscapingMustacheFactory.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/JsonEscapingMustacheFactory.java similarity index 100% rename from core/src/main/java/org/elasticsearch/script/mustache/JsonEscapingMustacheFactory.java rename to modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/JsonEscapingMustacheFactory.java diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java new file mode 100644 index 00000000000..3f6f6e00716 --- /dev/null +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java @@ -0,0 +1,40 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.script.mustache; + +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.ScriptModule; + +public class MustachePlugin extends Plugin { + + @Override + public String name() { + return "lang-mustache"; + } + + @Override + public String description() { + return "Mustache scripting integration for Elasticsearch"; + } + + public void onModule(ScriptModule module) { + module.addScriptEngine(MustacheScriptEngineService.class); + } +} diff --git a/core/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java similarity index 100% rename from core/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java rename to modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java diff --git a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java similarity index 84% rename from core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java rename to modules/lang-mustache/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java index cdf0c5d1501..9ec90196789 100644 --- a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java @@ -19,19 +19,28 @@ package org.elasticsearch.index.query; +import com.carrotsearch.randomizedtesting.generators.RandomInts; + import org.apache.lucene.search.Query; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import 
org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.AbstractQueryTestCase; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.TemplateQueryBuilder; import org.elasticsearch.script.Script.ScriptParseException; import org.elasticsearch.script.ScriptService.ScriptType; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.script.Template; import org.junit.BeforeClass; import java.io.IOException; import java.util.HashMap; import java.util.Map; +import java.util.Random; +@ESTestCase.AwaitsFix(bugUrl = "nopush") public class TemplateQueryBuilderTests extends AbstractQueryTestCase { /** @@ -41,7 +50,20 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase createQuery(Random r) { + switch (RandomInts.randomIntBetween(r, 0, 2)) { + case 0: + return new MatchAllQueryBuilder(); + case 1: + return new IdsQueryBuilder(); + case 2: + return EmptyQueryBuilder.PROTOTYPE; + default: + throw new UnsupportedOperationException(); + } } @Override diff --git a/core/src/test/java/org/elasticsearch/validate/RenderSearchTemplateIT.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java similarity index 93% rename from core/src/test/java/org/elasticsearch/validate/RenderSearchTemplateIT.java rename to modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java index e819286dcae..84994096fa6 100644 --- a/core/src/test/java/org/elasticsearch/validate/RenderSearchTemplateIT.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.validate; +package org.elasticsearch.messy.tests; import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateResponse; import org.elasticsearch.common.bytes.BytesArray; @@ -25,11 +25,15 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.Template; +import org.elasticsearch.script.mustache.MustachePlugin; import org.elasticsearch.script.mustache.MustacheScriptEngineService; import org.elasticsearch.test.ESIntegTestCase; +import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -37,10 +41,15 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; -@ESIntegTestCase.SuiteScopeTestCase -public class RenderSearchTemplateIT extends ESIntegTestCase { +@ESIntegTestCase.SuiteScopeTestCase @ESIntegTestCase.AwaitsFix(bugUrl = "nopush") +public class RenderSearchTemplateTests extends ESIntegTestCase { private static final String TEMPLATE_CONTENTS = "{\"size\":\"{{size}}\",\"query\":{\"match\":{\"foo\":\"{{value}}\"}},\"aggs\":{\"objects\":{\"terms\":{\"field\":\"{{value}}\",\"size\":\"{{size}}\"}}}}"; + @Override + protected Collection> nodePlugins() { + return Collections.singleton(MustachePlugin.class); + } + @Override protected void setupSuiteScopeCluster() throws Exception { client().preparePutIndexedScript(MustacheScriptEngineService.NAME, "index_template_1", "{ \"template\": " + TEMPLATE_CONTENTS + " }").get(); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java 
b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java similarity index 99% rename from core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java rename to modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java index 1850abc8595..a0699a35534 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.suggest; +package org.elasticsearch.messy.tests; import org.elasticsearch.ElasticsearchException; @@ -33,6 +33,10 @@ import org.elasticsearch.action.suggest.SuggestResponse; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.mustache.MustachePlugin; +import org.elasticsearch.search.suggest.Suggest; +import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.search.suggest.SuggestBuilder.SuggestionBuilder; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.DirectCandidateGenerator; @@ -46,6 +50,7 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -76,7 +81,13 @@ import static org.hamcrest.Matchers.nullValue; * possible these tests should declare for the first request, make the request, modify the configuration for the next request, make that * request, modify again, request again, etc. This makes it very obvious what changes between requests. 
*/ -public class SuggestSearchIT extends ESIntegTestCase { +public class SuggestSearchTests extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return Collections.singleton(MustachePlugin.class); + } + // see #3196 public void testSuggestAcrossMultipleIndices() throws IOException { createIndex("test"); @@ -609,7 +620,7 @@ public class SuggestSearchIT extends ESIntegTestCase { } private List readMarvelHeroNames() throws IOException, URISyntaxException { - return Files.readAllLines(PathUtils.get(SuggestSearchIT.class.getResource("/config/names.txt").toURI()), StandardCharsets.UTF_8); + return Files.readAllLines(PathUtils.get(Suggest.class.getResource("/config/names.txt").toURI()), StandardCharsets.UTF_8); } public void testSizePararm() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryParserTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java similarity index 97% rename from core/src/test/java/org/elasticsearch/index/query/TemplateQueryParserTests.java rename to modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java index d62a11077ec..940f4e7d134 100644 --- a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryParserTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.index.query; +package org.elasticsearch.messy.tests; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; @@ -45,6 +45,8 @@ import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.TemplateQueryParser; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.similarity.SimilarityService; @@ -74,6 +76,7 @@ import static org.hamcrest.Matchers.containsString; * Test parsing and executing a template request. */ // NOTE: this can't be migrated to ESSingleNodeTestCase because of the custom path.conf +@ESTestCase.AwaitsFix(bugUrl = "nopush") public class TemplateQueryParserTests extends ESTestCase { private Injector injector; @@ -89,7 +92,7 @@ public class TemplateQueryParserTests extends ESTestCase { .build(); final Client proxy = (Client) Proxy.newProxyInstance( Client.class.getClassLoader(), - new Class[]{Client.class}, (proxy1, method, args) -> { + new Class[]{Client.class}, (proxy1, method, args) -> { throw new UnsupportedOperationException("client is just a dummy"); }); Index index = new Index("test"); diff --git a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryIT.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java similarity index 97% rename from core/src/test/java/org/elasticsearch/index/query/TemplateQueryIT.java rename to modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java index d4816f8d334..70298266df9 100644 --- a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryIT.java +++ 
b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.index.query; +package org.elasticsearch.messy.tests; import org.elasticsearch.action.index.IndexRequest.OpType; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -31,9 +31,14 @@ import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.TemplateQueryBuilder; +import org.elasticsearch.index.query.TemplateQueryParser; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.Template; +import org.elasticsearch.script.mustache.MustachePlugin; import org.elasticsearch.script.mustache.MustacheScriptEngineService; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESIntegTestCase; @@ -41,6 +46,8 @@ import org.junit.Before; import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -58,7 +65,12 @@ import static org.hamcrest.Matchers.is; * Full integration test of the template query plugin. 
*/ @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) -public class TemplateQueryIT extends ESIntegTestCase { +public class TemplateQueryTests extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return Collections.singleton(MustachePlugin.class); + } @Before public void setup() throws IOException { diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/package-info.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/package-info.java new file mode 100644 index 00000000000..9b7b6f55c6e --- /dev/null +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/package-info.java @@ -0,0 +1,44 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +/** + * This package contains tests that use mustache to test what looks + * to be unrelated functionality, or functionality that should be + * tested with a mock instead. Instead of doing an epic battle + * with these tests, they are temporarily moved here to the mustache + * module's tests, but that is likely not where they belong. Please + * help by cleaning them up and we can remove this package! + * + *

+ */ +/* List of renames that took place: +renamed: core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java -> modules/lang-mustache/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java +renamed: core/src/test/java/org/elasticsearch/validate/RenderSearchTemplateIT.java -> modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java +renamed: core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java -> modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java +renamed: core/src/test/java/org/elasticsearch/index/query/TemplateQueryParserTests.java -> modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java +renamed: core/src/test/java/org/elasticsearch/index/query/TemplateQueryIT.java -> modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java + */ + +package org.elasticsearch.messy.tests; \ No newline at end of file diff --git a/core/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java similarity index 100% rename from core/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java rename to modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java diff --git a/core/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java similarity index 100% rename from core/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java rename to modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java diff --git a/core/src/test/resources/org/elasticsearch/index/query/config/scripts/full-query-template.mustache 
b/modules/lang-mustache/src/test/resources/org/elasticsearch/messy/tests/config/scripts/full-query-template.mustache similarity index 100% rename from core/src/test/resources/org/elasticsearch/index/query/config/scripts/full-query-template.mustache rename to modules/lang-mustache/src/test/resources/org/elasticsearch/messy/tests/config/scripts/full-query-template.mustache diff --git a/core/src/test/resources/org/elasticsearch/index/query/config/scripts/storedTemplate.mustache b/modules/lang-mustache/src/test/resources/org/elasticsearch/messy/tests/config/scripts/storedTemplate.mustache similarity index 100% rename from core/src/test/resources/org/elasticsearch/index/query/config/scripts/storedTemplate.mustache rename to modules/lang-mustache/src/test/resources/org/elasticsearch/messy/tests/config/scripts/storedTemplate.mustache diff --git a/settings.gradle b/settings.gradle index 0791c3d1752..b400ada42be 100644 --- a/settings.gradle +++ b/settings.gradle @@ -11,6 +11,7 @@ List projects = [ 'test-framework', 'modules:lang-expression', 'modules:lang-groovy', + 'modules:lang-mustache', 'plugins:analysis-icu', 'plugins:analysis-kuromoji', 'plugins:analysis-phonetic', diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/test-framework/src/main/java/org/elasticsearch/index/query/AbstractQueryTestCase.java similarity index 98% rename from core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java rename to test-framework/src/main/java/org/elasticsearch/index/query/AbstractQueryTestCase.java index aebf00e0728..72bbe3ce509 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java +++ b/test-framework/src/main/java/org/elasticsearch/index/query/AbstractQueryTestCase.java @@ -84,7 +84,6 @@ import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.*; import 
org.elasticsearch.script.Script.ScriptParseException; -import org.elasticsearch.script.mustache.MustacheScriptEngineService; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; @@ -205,15 +204,8 @@ public abstract class AbstractQueryTestCase> MockScriptEngine mockScriptEngine = new MockScriptEngine(); Multibinder multibinder = Multibinder.newSetBinder(binder(), ScriptEngineService.class); multibinder.addBinding().toInstance(mockScriptEngine); - try { - Class.forName("com.github.mustachejava.Mustache"); - } catch(ClassNotFoundException e) { - throw new IllegalStateException("error while loading mustache", e); - } - MustacheScriptEngineService mustacheScriptEngineService = new MustacheScriptEngineService(settings); Set engines = new HashSet<>(); engines.add(mockScriptEngine); - engines.add(mustacheScriptEngineService); List customContexts = new ArrayList<>(); bind(ScriptContextRegistry.class).toInstance(new ScriptContextRegistry(customContexts)); try { diff --git a/core/src/test/java/org/elasticsearch/script/MockScriptEngine.java b/test-framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java similarity index 100% rename from core/src/test/java/org/elasticsearch/script/MockScriptEngine.java rename to test-framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java From 18698b9308e209f3dd9d0d02a7075436b66cc89a Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 8 Dec 2015 18:19:44 -0800 Subject: [PATCH 28/57] Build: Add build plugin for "messy" tests We have some tests which have crazy dependencies, like on other plugins. This change adds a "messy-test" gradle plugin which can be used for qa projects that these types of tests can run in. What this adds over regular standalone tests is the plugin properties and metadata on the classpath, so that the plugins are properly initialized. 
--- .../gradle/test/ClusterFormationTasks.groovy | 2 +- .../gradle/test/MessyTestPlugin.groovy | 63 +++++++++++++++++++ .../elasticsearch.messy-test.properties | 20 ++++++ 3 files changed, 84 insertions(+), 1 deletion(-) create mode 100644 buildSrc/src/main/groovy/org/elasticsearch/gradle/test/MessyTestPlugin.groovy create mode 100644 buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.messy-test.properties diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index fff6082b3e5..e62175b743e 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -525,7 +525,7 @@ class ClusterFormationTasks { } } - static String pluginTaskName(String action, String name, String suffix) { + public static String pluginTaskName(String action, String name, String suffix) { // replace every dash followed by a character with just the uppercase character String camelName = name.replaceAll(/-(\w)/) { _, c -> c.toUpperCase(Locale.ROOT) } return action + camelName[0].toUpperCase(Locale.ROOT) + camelName.substring(1) + suffix diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/MessyTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/MessyTestPlugin.groovy new file mode 100644 index 00000000000..1cca2c5aa49 --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/MessyTestPlugin.groovy @@ -0,0 +1,63 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle.test + +import org.elasticsearch.gradle.plugin.PluginBuildPlugin +import org.gradle.api.Project +import org.gradle.api.artifacts.Dependency +import org.gradle.api.artifacts.ProjectDependency +import org.gradle.api.tasks.Copy + +/** + * A plugin to run messy tests, which are generally tests that depend on plugins. + * + * This plugin will add the same test configuration as standalone tests, except + * also add the plugin-metadata and properties files for each plugin project + * dependency. + */ +class MessyTestPlugin extends StandaloneTestPlugin { + @Override + public void apply(Project project) { + super.apply(project) + + project.configurations.testCompile.dependencies.all { Dependency dep -> + // this closure is run every time a compile dependency is added + if (dep instanceof ProjectDependency && dep.dependencyProject.plugins.hasPlugin(PluginBuildPlugin)) { + project.gradle.projectsEvaluated { + addPluginResources(project, dep.dependencyProject) + } + } + } + } + + private static addPluginResources(Project project, Project pluginProject) { + String outputDir = "generated-resources/${pluginProject.name}" + String taskName = ClusterFormationTasks.pluginTaskName("copy", pluginProject.name, "Metadata") + Copy copyPluginMetadata = project.tasks.create(taskName, Copy.class) + copyPluginMetadata.into(outputDir) + copyPluginMetadata.from(pluginProject.tasks.pluginProperties) + copyPluginMetadata.from(pluginProject.file('src/main/plugin-metadata')) + project.sourceSets.test.output.dir(outputDir, builtBy: 
taskName) + + // add each generated dir to the test classpath in IDEs + //project.eclipse.classpath.sourceSets = [project.sourceSets.test] + project.idea.module.singleEntryLibraries= ['TEST': [project.file(outputDir)]] + } +} diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.messy-test.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.messy-test.properties new file mode 100644 index 00000000000..507a0f85a04 --- /dev/null +++ b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.messy-test.properties @@ -0,0 +1,20 @@ +# +# Licensed to Elasticsearch under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +implementation-class=org.elasticsearch.gradle.test.MessyTestPlugin From 907d3cf878d820cebf38fa236201daa22e8f28d9 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 8 Dec 2015 20:26:45 -0800 Subject: [PATCH 29/57] move template query builder tests back to core (using mockscript) --- .../index/query/TemplateQueryBuilderTests.java | 5 ++--- .../org/elasticsearch/script/MockScriptEngine.java | 11 +++++++++-- 2 files changed, 11 insertions(+), 5 deletions(-) rename {modules/lang-mustache => core}/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java (98%) diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java similarity index 98% rename from modules/lang-mustache/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java rename to core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java index 9ec90196789..aa5e9142041 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java @@ -40,7 +40,6 @@ import java.util.HashMap; import java.util.Map; import java.util.Random; -@ESTestCase.AwaitsFix(bugUrl = "nopush") public class TemplateQueryBuilderTests extends AbstractQueryTestCase { /** @@ -52,7 +51,7 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase createQuery(Random r) { switch (RandomInts.randomIntBetween(r, 0, 2)) { case 0: @@ -73,7 +72,7 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase vars) { - return null; + return new AbstractExecutableScript() { + @Override + public Object run() { + return new BytesArray((String)compiledScript.compiled()); + } + }; } @Override From a6fe9a4212f2aec92e526dcf4b564fd67347860b Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Tue, 8 Dec 2015 23:42:18 -0500 
Subject: [PATCH 30/57] move test back now that its fixed --- .../java/org/elasticsearch/index/query/AbstractQueryTestCase.java | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename {test-framework/src/main => core/src/test}/java/org/elasticsearch/index/query/AbstractQueryTestCase.java (100%) diff --git a/test-framework/src/main/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java similarity index 100% rename from test-framework/src/main/java/org/elasticsearch/index/query/AbstractQueryTestCase.java rename to core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java From 7f06c12c7f246d0d0fdb8757b81d1c661fcf15c5 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Tue, 8 Dec 2015 23:44:28 -0500 Subject: [PATCH 31/57] restore randomness in test --- .../query/TemplateQueryBuilderTests.java | 23 +------------------ 1 file changed, 1 insertion(+), 22 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java index aa5e9142041..df7eb3c697a 100644 --- a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java @@ -19,26 +19,18 @@ package org.elasticsearch.index.query; -import com.carrotsearch.randomizedtesting.generators.RandomInts; - import org.apache.lucene.search.Query; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.query.AbstractQueryTestCase; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.query.TemplateQueryBuilder; import org.elasticsearch.script.Script.ScriptParseException; import 
org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.test.ESTestCase; import org.elasticsearch.script.Template; import org.junit.BeforeClass; import java.io.IOException; import java.util.HashMap; import java.util.Map; -import java.util.Random; public class TemplateQueryBuilderTests extends AbstractQueryTestCase { @@ -49,20 +41,7 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase createQuery(Random r) { - switch (RandomInts.randomIntBetween(r, 0, 2)) { - case 0: - return new MatchAllQueryBuilder(); - case 1: - return new IdsQueryBuilder(); - case 2: - return EmptyQueryBuilder.PROTOTYPE; - default: - throw new UnsupportedOperationException(); - } + templateBase = RandomQueryBuilder.createQuery(getRandom()); } @Override From 3632ddf497c228c6df61853b13d8f215ad17239a Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Wed, 9 Dec 2015 00:00:40 -0500 Subject: [PATCH 32/57] split out mustache methods so groovy works again --- .../tests/ContextAndHeaderTransportTests.java | 181 -------- .../messy/tests/package-info.java | 1 + modules/lang-mustache/build.gradle | 2 +- .../tests/ContextAndHeaderTransportTests.java | 389 ++++++++++++++++++ .../messy/tests/package-info.java | 3 +- 5 files changed, 393 insertions(+), 183 deletions(-) create mode 100644 modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java index 1362975a92c..728a932d2b5 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java @@ -27,16 +27,13 @@ import org.elasticsearch.action.ActionModule; import org.elasticsearch.action.ActionRequest; import 
org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequest; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptResponse; import org.elasticsearch.action.percolate.PercolateResponse; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; @@ -47,8 +44,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.lucene.search.function.CombineFunction; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.GeoShapeQueryBuilder; @@ -62,15 +57,8 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestController; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.Template; import org.elasticsearch.script.groovy.GroovyPlugin; import org.elasticsearch.script.groovy.GroovyScriptEngineService; -import org.elasticsearch.script.mustache.MustacheScriptEngineService; -import org.elasticsearch.search.aggregations.AggregationBuilders; -import 
org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders; -import org.elasticsearch.search.suggest.Suggest; -import org.elasticsearch.search.suggest.SuggestBuilder; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; @@ -79,13 +67,10 @@ import org.junit.After; import org.junit.Before; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Locale; -import java.util.Map; import java.util.concurrent.CopyOnWriteArrayList; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; @@ -284,172 +269,6 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { assertRequestsContainHeader(PutIndexedScriptRequest.class); } - public void testThatIndexedScriptGetRequestInTemplateQueryContainsContextAndHeaders() throws Exception { - PutIndexedScriptResponse scriptResponse = transportClient() - .preparePutIndexedScript( - MustacheScriptEngineService.NAME, - "my_script", - jsonBuilder().startObject().field("script", "{ \"match\": { \"name\": \"Star Wars\" }}").endObject() - .string()).get(); - assertThat(scriptResponse.isCreated(), is(true)); - - transportClient().prepareIndex(queryIndex, "type", "1") - .setSource(jsonBuilder().startObject().field("name", "Star Wars - The new republic").endObject()).get(); - transportClient().admin().indices().prepareRefresh(queryIndex).get(); - - SearchResponse searchResponse = transportClient() - .prepareSearch(queryIndex) - .setQuery( - QueryBuilders.templateQuery(new Template("my_script", ScriptType.INDEXED, - MustacheScriptEngineService.NAME, null, null))).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 1); - - 
assertGetRequestsContainHeaders(".scripts"); - assertRequestsContainHeader(PutIndexedScriptRequest.class); - } - - public void testThatIndexedScriptGetRequestInReducePhaseContainsContextAndHeaders() throws Exception { - PutIndexedScriptResponse scriptResponse = transportClient().preparePutIndexedScript(GroovyScriptEngineService.NAME, "my_script", - jsonBuilder().startObject().field("script", "_value0 * 10").endObject().string()).get(); - assertThat(scriptResponse.isCreated(), is(true)); - - transportClient().prepareIndex(queryIndex, "type", "1") - .setSource(jsonBuilder().startObject().field("s_field", "foo").field("l_field", 10).endObject()).get(); - transportClient().admin().indices().prepareRefresh(queryIndex).get(); - - SearchResponse searchResponse = transportClient() - .prepareSearch(queryIndex) - .addAggregation( - AggregationBuilders - .terms("terms") - .field("s_field") - .subAggregation(AggregationBuilders.max("max").field("l_field")) - .subAggregation( - PipelineAggregatorBuilders.bucketScript("scripted").setBucketsPaths("max").script( - new Script("my_script", ScriptType.INDEXED, GroovyScriptEngineService.NAME, null)))).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 1); - - assertGetRequestsContainHeaders(".scripts"); - assertRequestsContainHeader(PutIndexedScriptRequest.class); - } - - public void testThatSearchTemplatesWithIndexedTemplatesGetRequestContainsContextAndHeaders() throws Exception { - PutIndexedScriptResponse scriptResponse = transportClient().preparePutIndexedScript(MustacheScriptEngineService.NAME, "the_template", - jsonBuilder().startObject().startObject("template").startObject("query").startObject("match") - .field("name", "{{query_string}}").endObject().endObject().endObject().endObject().string() - ).get(); - assertThat(scriptResponse.isCreated(), is(true)); - - transportClient().prepareIndex(queryIndex, "type", "1") - .setSource(jsonBuilder().startObject().field("name", "Star Wars - The new 
republic").endObject()) - .get(); - transportClient().admin().indices().prepareRefresh(queryIndex).get(); - - Map params = new HashMap<>(); - params.put("query_string", "star wars"); - - SearchResponse searchResponse = transportClient().prepareSearch(queryIndex).setTemplate(new Template("the_template", ScriptType.INDEXED, MustacheScriptEngineService.NAME, null, params)) - .get(); - - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 1); - - assertGetRequestsContainHeaders(".scripts"); - assertRequestsContainHeader(PutIndexedScriptRequest.class); - } - - public void testThatIndexedScriptGetRequestInPhraseSuggestContainsContextAndHeaders() throws Exception { - CreateIndexRequestBuilder builder = transportClient().admin().indices().prepareCreate("test").setSettings(settingsBuilder() - .put(indexSettings()) - .put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable. - .put("index.analysis.analyzer.text.tokenizer", "standard") - .putArray("index.analysis.analyzer.text.filter", "lowercase", "my_shingle") - .put("index.analysis.filter.my_shingle.type", "shingle") - .put("index.analysis.filter.my_shingle.output_unigrams", true) - .put("index.analysis.filter.my_shingle.min_shingle_size", 2) - .put("index.analysis.filter.my_shingle.max_shingle_size", 3)); - - XContentBuilder mapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("type1") - .startObject("properties") - .startObject("title") - .field("type", "string") - .field("analyzer", "text") - .endObject() - .endObject() - .endObject() - .endObject(); - assertAcked(builder.addMapping("type1", mapping)); - ensureGreen(); - - List titles = new ArrayList<>(); - - titles.add("United States House of Representatives Elections in Washington 2006"); - titles.add("United States House of Representatives Elections in Washington 2005"); - titles.add("State"); - titles.add("Houses of Parliament"); - titles.add("Representative Government"); - titles.add("Election"); - 
- List builders = new ArrayList<>(); - for (String title: titles) { - transportClient().prepareIndex("test", "type1").setSource("title", title).get(); - } - transportClient().admin().indices().prepareRefresh("test").get(); - - String filterStringAsFilter = XContentFactory.jsonBuilder() - .startObject() - .startObject("match_phrase") - .field("title", "{{suggestion}}") - .endObject() - .endObject() - .string(); - - PutIndexedScriptResponse scriptResponse = transportClient() - .preparePutIndexedScript( - MustacheScriptEngineService.NAME, - "my_script", - jsonBuilder().startObject().field("script", filterStringAsFilter).endObject() - .string()).get(); - assertThat(scriptResponse.isCreated(), is(true)); - - PhraseSuggestionBuilder suggest = phraseSuggestion("title") - .field("title") - .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("title") - .suggestMode("always") - .maxTermFreq(.99f) - .size(10) - .maxInspections(200) - ) - .confidence(0f) - .maxErrors(2f) - .shardSize(30000) - .size(10); - - PhraseSuggestionBuilder filteredFilterSuggest = suggest.collateQuery(new Template("my_script", ScriptType.INDEXED, - MustacheScriptEngineService.NAME, null, null)); - - SearchRequestBuilder searchRequestBuilder = transportClient().prepareSearch("test").setSize(0); - SuggestBuilder suggestBuilder = new SuggestBuilder(); - String suggestText = "united states house of representatives elections in washington 2006"; - if (suggestText != null) { - suggestBuilder.setText(suggestText); - } - suggestBuilder.addSuggestion(filteredFilterSuggest); - searchRequestBuilder.suggest(suggestBuilder); - SearchResponse actionGet = searchRequestBuilder.execute().actionGet(); - assertThat(Arrays.toString(actionGet.getShardFailures()), actionGet.getFailedShards(), equalTo(0)); - Suggest searchSuggest = actionGet.getSuggest(); - - assertSuggestionSize(searchSuggest, 0, 2, "title"); - - assertGetRequestsContainHeaders(".scripts"); - 
assertRequestsContainHeader(PutIndexedScriptRequest.class); - } - public void testThatRelevantHttpHeadersBecomeRequestHeaders() throws Exception { String releventHeaderName = "relevant_" + randomHeaderKey; for (RestController restController : internalCluster().getDataNodeInstances(RestController.class)) { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/package-info.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/package-info.java index af27047d89a..adf34927ba4 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/package-info.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/package-info.java @@ -42,6 +42,7 @@ renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/CardinalityTests.java renamed: core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ChildQuerySearchTests.java renamed: core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java + ^^^^^ note: the methods from this test using mustache were moved to the mustache module under its messy tests package. 
renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateHistogramTests.java renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DoubleTermsTests.java diff --git a/modules/lang-mustache/build.gradle b/modules/lang-mustache/build.gradle index b2e11c1c299..4e8e9cc345d 100644 --- a/modules/lang-mustache/build.gradle +++ b/modules/lang-mustache/build.gradle @@ -26,7 +26,7 @@ dependencies { compile "com.github.spullara.mustache.java:compiler:0.9.1" } -//compileTestJava.options.compilerArgs << '-Xlint:-rawtypes' +compileTestJava.options.compilerArgs << '-Xlint:-rawtypes,-unchecked' integTest { cluster { diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java new file mode 100644 index 00000000000..92d15332780 --- /dev/null +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java @@ -0,0 +1,389 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.messy.tests; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionModule; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; +import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequest; +import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptResponse; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.ActionFilter; +import org.elasticsearch.client.Client; +import org.elasticsearch.client.FilterClient; +import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.ScriptService.ScriptType; +import 
org.elasticsearch.script.Template; +import org.elasticsearch.script.mustache.MustachePlugin; +import org.elasticsearch.script.mustache.MustacheScriptEngineService; +import org.elasticsearch.search.suggest.Suggest; +import org.elasticsearch.search.suggest.SuggestBuilder; +import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESIntegTestCase.ClusterScope; +import org.junit.After; +import org.junit.Before; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.concurrent.CopyOnWriteArrayList; + +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; +import static org.elasticsearch.common.settings.Settings.settingsBuilder; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.node.Node.HTTP_ENABLED; +import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion; +import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionSize; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; + +@ClusterScope(scope = SUITE) +public class ContextAndHeaderTransportTests extends ESIntegTestCase { + private static final List requests = new CopyOnWriteArrayList<>(); + private String randomHeaderKey = randomAsciiOfLength(10); + 
private String randomHeaderValue = randomAsciiOfLength(20); + private String queryIndex = "query-" + randomAsciiOfLength(10).toLowerCase(Locale.ROOT); + private String lookupIndex = "lookup-" + randomAsciiOfLength(10).toLowerCase(Locale.ROOT); + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return settingsBuilder() + .put(super.nodeSettings(nodeOrdinal)) + .put("script.indexed", "on") + .put(HTTP_ENABLED, true) + .build(); + } + + @Override + protected Collection> nodePlugins() { + return pluginList(ActionLoggingPlugin.class, MustachePlugin.class); + } + + @Before + public void createIndices() throws Exception { + String mapping = jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("location").field("type", "geo_shape").endObject() + .startObject("name").field("type", "string").endObject() + .endObject() + .endObject().endObject().string(); + + Settings settings = settingsBuilder() + .put(indexSettings()) + .put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable. 
+ .build(); + assertAcked(transportClient().admin().indices().prepareCreate(lookupIndex) + .setSettings(settings).addMapping("type", mapping)); + assertAcked(transportClient().admin().indices().prepareCreate(queryIndex) + .setSettings(settings).addMapping("type", mapping)); + ensureGreen(queryIndex, lookupIndex); + + requests.clear(); + } + + @After + public void checkAllRequestsContainHeaders() { + assertRequestsContainHeader(IndexRequest.class); + assertRequestsContainHeader(RefreshRequest.class); + } + + public void testThatIndexedScriptGetRequestInTemplateQueryContainsContextAndHeaders() throws Exception { + PutIndexedScriptResponse scriptResponse = transportClient() + .preparePutIndexedScript( + MustacheScriptEngineService.NAME, + "my_script", + jsonBuilder().startObject().field("script", "{ \"match\": { \"name\": \"Star Wars\" }}").endObject() + .string()).get(); + assertThat(scriptResponse.isCreated(), is(true)); + + transportClient().prepareIndex(queryIndex, "type", "1") + .setSource(jsonBuilder().startObject().field("name", "Star Wars - The new republic").endObject()).get(); + transportClient().admin().indices().prepareRefresh(queryIndex).get(); + + SearchResponse searchResponse = transportClient() + .prepareSearch(queryIndex) + .setQuery( + QueryBuilders.templateQuery(new Template("my_script", ScriptType.INDEXED, + MustacheScriptEngineService.NAME, null, null))).get(); + assertNoFailures(searchResponse); + assertHitCount(searchResponse, 1); + + assertGetRequestsContainHeaders(".scripts"); + assertRequestsContainHeader(PutIndexedScriptRequest.class); + } + + public void testThatSearchTemplatesWithIndexedTemplatesGetRequestContainsContextAndHeaders() throws Exception { + PutIndexedScriptResponse scriptResponse = transportClient().preparePutIndexedScript(MustacheScriptEngineService.NAME, "the_template", + jsonBuilder().startObject().startObject("template").startObject("query").startObject("match") + .field("name", 
"{{query_string}}").endObject().endObject().endObject().endObject().string() + ).get(); + assertThat(scriptResponse.isCreated(), is(true)); + + transportClient().prepareIndex(queryIndex, "type", "1") + .setSource(jsonBuilder().startObject().field("name", "Star Wars - The new republic").endObject()) + .get(); + transportClient().admin().indices().prepareRefresh(queryIndex).get(); + + Map params = new HashMap<>(); + params.put("query_string", "star wars"); + + SearchResponse searchResponse = transportClient().prepareSearch(queryIndex).setTemplate(new Template("the_template", ScriptType.INDEXED, MustacheScriptEngineService.NAME, null, params)) + .get(); + + assertNoFailures(searchResponse); + assertHitCount(searchResponse, 1); + + assertGetRequestsContainHeaders(".scripts"); + assertRequestsContainHeader(PutIndexedScriptRequest.class); + } + + public void testThatIndexedScriptGetRequestInPhraseSuggestContainsContextAndHeaders() throws Exception { + CreateIndexRequestBuilder builder = transportClient().admin().indices().prepareCreate("test").setSettings(settingsBuilder() + .put(indexSettings()) + .put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable. 
+ .put("index.analysis.analyzer.text.tokenizer", "standard") + .putArray("index.analysis.analyzer.text.filter", "lowercase", "my_shingle") + .put("index.analysis.filter.my_shingle.type", "shingle") + .put("index.analysis.filter.my_shingle.output_unigrams", true) + .put("index.analysis.filter.my_shingle.min_shingle_size", 2) + .put("index.analysis.filter.my_shingle.max_shingle_size", 3)); + + XContentBuilder mapping = XContentFactory.jsonBuilder() + .startObject() + .startObject("type1") + .startObject("properties") + .startObject("title") + .field("type", "string") + .field("analyzer", "text") + .endObject() + .endObject() + .endObject() + .endObject(); + assertAcked(builder.addMapping("type1", mapping)); + ensureGreen(); + + List titles = new ArrayList<>(); + + titles.add("United States House of Representatives Elections in Washington 2006"); + titles.add("United States House of Representatives Elections in Washington 2005"); + titles.add("State"); + titles.add("Houses of Parliament"); + titles.add("Representative Government"); + titles.add("Election"); + + List builders = new ArrayList<>(); + for (String title: titles) { + transportClient().prepareIndex("test", "type1").setSource("title", title).get(); + } + transportClient().admin().indices().prepareRefresh("test").get(); + + String filterStringAsFilter = XContentFactory.jsonBuilder() + .startObject() + .startObject("match_phrase") + .field("title", "{{suggestion}}") + .endObject() + .endObject() + .string(); + + PutIndexedScriptResponse scriptResponse = transportClient() + .preparePutIndexedScript( + MustacheScriptEngineService.NAME, + "my_script", + jsonBuilder().startObject().field("script", filterStringAsFilter).endObject() + .string()).get(); + assertThat(scriptResponse.isCreated(), is(true)); + + PhraseSuggestionBuilder suggest = phraseSuggestion("title") + .field("title") + .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("title") + .suggestMode("always") + .maxTermFreq(.99f) + .size(10) 
+ .maxInspections(200) + ) + .confidence(0f) + .maxErrors(2f) + .shardSize(30000) + .size(10); + + PhraseSuggestionBuilder filteredFilterSuggest = suggest.collateQuery(new Template("my_script", ScriptType.INDEXED, + MustacheScriptEngineService.NAME, null, null)); + + SearchRequestBuilder searchRequestBuilder = transportClient().prepareSearch("test").setSize(0); + SuggestBuilder suggestBuilder = new SuggestBuilder(); + String suggestText = "united states house of representatives elections in washington 2006"; + if (suggestText != null) { + suggestBuilder.setText(suggestText); + } + suggestBuilder.addSuggestion(filteredFilterSuggest); + searchRequestBuilder.suggest(suggestBuilder); + SearchResponse actionGet = searchRequestBuilder.execute().actionGet(); + assertThat(Arrays.toString(actionGet.getShardFailures()), actionGet.getFailedShards(), equalTo(0)); + Suggest searchSuggest = actionGet.getSuggest(); + + assertSuggestionSize(searchSuggest, 0, 2, "title"); + + assertGetRequestsContainHeaders(".scripts"); + assertRequestsContainHeader(PutIndexedScriptRequest.class); + } + + private List getRequests(Class clazz) { + List results = new ArrayList<>(); + for (ActionRequest request : requests) { + if (request.getClass().equals(clazz)) { + results.add((T) request); + } + } + + return results; + } + + private void assertRequestsContainHeader(Class clazz) { + List classRequests = getRequests(clazz); + for (ActionRequest request : classRequests) { + assertRequestContainsHeader(request); + } + } + + private void assertGetRequestsContainHeaders() { + assertGetRequestsContainHeaders(this.lookupIndex); + } + + private void assertGetRequestsContainHeaders(String index) { + List getRequests = getRequests(GetRequest.class); + assertThat(getRequests, hasSize(greaterThan(0))); + + for (GetRequest request : getRequests) { + if (!request.index().equals(index)) { + continue; + } + assertRequestContainsHeader(request); + } + } + + private void assertRequestContainsHeader(ActionRequest 
request) { + String msg = String.format(Locale.ROOT, "Expected header %s to be in request %s", randomHeaderKey, request.getClass().getName()); + if (request instanceof IndexRequest) { + IndexRequest indexRequest = (IndexRequest) request; + msg = String.format(Locale.ROOT, "Expected header %s to be in index request %s/%s/%s", randomHeaderKey, + indexRequest.index(), indexRequest.type(), indexRequest.id()); + } + assertThat(msg, request.hasHeader(randomHeaderKey), is(true)); + assertThat(request.getHeader(randomHeaderKey).toString(), is(randomHeaderValue)); + } + + /** + * a transport client that adds our random header + */ + private Client transportClient() { + Client transportClient = internalCluster().transportClient(); + FilterClient filterClient = new FilterClient(transportClient) { + @Override + protected > void doExecute(Action action, Request request, ActionListener listener) { + request.putHeader(randomHeaderKey, randomHeaderValue); + super.doExecute(action, request, listener); + } + }; + + return filterClient; + } + + public static class ActionLoggingPlugin extends Plugin { + + @Override + public String name() { + return "test-action-logging"; + } + + @Override + public String description() { + return "Test action logging"; + } + + @Override + public Collection nodeModules() { + return Collections.singletonList(new ActionLoggingModule()); + } + + public void onModule(ActionModule module) { + module.registerFilter(LoggingFilter.class); + } + } + + public static class ActionLoggingModule extends AbstractModule { + @Override + protected void configure() { + bind(LoggingFilter.class).asEagerSingleton(); + } + + } + + public static class LoggingFilter extends ActionFilter.Simple { + + @Inject + public LoggingFilter(Settings settings) { + super(settings); + } + + @Override + public int order() { + return 999; + } + + @Override + protected boolean apply(String action, ActionRequest request, ActionListener listener) { + requests.add(request); + return true; + } + + 
@Override + protected boolean apply(String action, ActionResponse response, ActionListener listener) { + return true; + } + } +} diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/package-info.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/package-info.java index 9b7b6f55c6e..46542313821 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/package-info.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/package-info.java @@ -34,11 +34,12 @@ * */ /* List of renames that took place: -renamed: core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java -> modules/lang-mustache/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java renamed: core/src/test/java/org/elasticsearch/validate/RenderSearchTemplateIT.java -> modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java renamed: core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java -> modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java renamed: core/src/test/java/org/elasticsearch/index/query/TemplateQueryParserTests.java -> modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java renamed: core/src/test/java/org/elasticsearch/index/query/TemplateQueryIT.java -> modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java +renamed: core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java -> module/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java + ^^^^^ note: just the methods from this test using mustache were moved here, the others use groovy and are in the groovy module under its messy tests package. 
*/ package org.elasticsearch.messy.tests; \ No newline at end of file From 17436d4332722f5bf1f487162b80793b76f9ad01 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Wed, 9 Dec 2015 00:10:37 -0500 Subject: [PATCH 33/57] get gradle check passing for lang-mustache --- .../licenses/compiler-0.9.1.jar.sha1 | 0 .../licenses/compiler-LICENSE.txt | 0 .../licenses/compiler-NOTICE.txt | 0 .../script/mustache/MustacheRestIT.java | 48 +++++++++++++++++++ .../test/lang_mustache/10_basic.yaml | 14 ++++++ 5 files changed, 62 insertions(+) rename {distribution => modules/lang-mustache}/licenses/compiler-0.9.1.jar.sha1 (100%) rename {distribution => modules/lang-mustache}/licenses/compiler-LICENSE.txt (100%) rename {distribution => modules/lang-mustache}/licenses/compiler-NOTICE.txt (100%) create mode 100644 modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheRestIT.java create mode 100644 modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yaml diff --git a/distribution/licenses/compiler-0.9.1.jar.sha1 b/modules/lang-mustache/licenses/compiler-0.9.1.jar.sha1 similarity index 100% rename from distribution/licenses/compiler-0.9.1.jar.sha1 rename to modules/lang-mustache/licenses/compiler-0.9.1.jar.sha1 diff --git a/distribution/licenses/compiler-LICENSE.txt b/modules/lang-mustache/licenses/compiler-LICENSE.txt similarity index 100% rename from distribution/licenses/compiler-LICENSE.txt rename to modules/lang-mustache/licenses/compiler-LICENSE.txt diff --git a/distribution/licenses/compiler-NOTICE.txt b/modules/lang-mustache/licenses/compiler-NOTICE.txt similarity index 100% rename from distribution/licenses/compiler-NOTICE.txt rename to modules/lang-mustache/licenses/compiler-NOTICE.txt diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheRestIT.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheRestIT.java new file mode 100644 index 
00000000000..0c489b3afb1 --- /dev/null +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheRestIT.java @@ -0,0 +1,48 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.script.mustache; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.RestTestCandidate; +import org.elasticsearch.test.rest.parser.RestTestParseException; + +import java.io.IOException; +import java.util.Collection; + +public class MustacheRestIT extends ESRestTestCase { + + @Override + protected Collection> nodePlugins() { + return pluginList(MustachePlugin.class); + } + + public MustacheRestIT(@Name("yaml") RestTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws IOException, RestTestParseException { + return ESRestTestCase.createParameters(0, 1); + } +} + diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yaml 
b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yaml new file mode 100644 index 00000000000..70a9693f594 --- /dev/null +++ b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yaml @@ -0,0 +1,14 @@ +# Integration tests for Mustache scripts +# +"Mustache loaded": + - do: + cluster.state: {} + + # Get master node id + - set: { master_node: master } + + - do: + nodes.info: {} + + - match: { nodes.$master.modules.0.name: lang-mustache } + - match: { nodes.$master.modules.0.jvm: true } From a6e1655fe9eb1b3c98f8bc2f4a06e95b7bcad837 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Wed, 9 Dec 2015 00:30:13 -0500 Subject: [PATCH 34/57] fix integ tests --- .../test/lang_mustache/10_basic.yaml | 57 +++++++++++++++++++ .../test/lang_mustache}/20_search.yaml | 0 .../30_render_search_template.yaml | 0 .../30_template_query_execution.yaml | 0 .../40_search_request_template.yaml | 0 .../test/lang_mustache/50_msearch.yaml | 1 + .../rest-api-spec/test/template/10_basic.yaml | 56 ------------------ 7 files changed, 58 insertions(+), 56 deletions(-) rename {rest-api-spec/src/main/resources/rest-api-spec/test/template => modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache}/20_search.yaml (100%) rename {rest-api-spec/src/main/resources/rest-api-spec/test/template => modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache}/30_render_search_template.yaml (100%) rename {rest-api-spec/src/main/resources/rest-api-spec/test/search => modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache}/30_template_query_execution.yaml (100%) rename {rest-api-spec/src/main/resources/rest-api-spec/test/search => modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache}/40_search_request_template.yaml (100%) rename rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yaml => 
modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_msearch.yaml (99%) delete mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/template/10_basic.yaml diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yaml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yaml index 70a9693f594..9bfea28abfa 100644 --- a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yaml +++ b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yaml @@ -12,3 +12,60 @@ - match: { nodes.$master.modules.0.name: lang-mustache } - match: { nodes.$master.modules.0.jvm: true } + +--- +"Indexed template": + + - do: + put_template: + id: "1" + body: { "template": { "query": { "match_all": {}}, "size": "{{my_size}}" } } + - match: { _id: "1" } + + - do: + get_template: + id: 1 + - match: { found: true } + - match: { lang: mustache } + - match: { _id: "1" } + - match: { _version: 1 } + - match: { template: /.*query\S\S\S\Smatch_all.*/ } + + - do: + catch: missing + get_template: + id: 2 + - match: { found: false } + - match: { lang: mustache } + - match: { _id: "2" } + - is_false: _version + - is_false: template + + - do: + delete_template: + id: "1" + - match: { found: true } + - match: { _index: ".scripts" } + - match: { _id: "1" } + - match: { _version: 2} + + - do: + catch: missing + delete_template: + id: "non_existing" + - match: { found: false } + - match: { _index: ".scripts" } + - match: { _id: "non_existing" } + - match: { _version: 1 } + + - do: + catch: request + put_template: + id: "1" + body: { "template": { "query": { "match{{}}_all": {}}, "size": "{{my_size}}" } } + + - do: + catch: /Unable\sto\sparse.*/ + put_template: + id: "1" + body: { "template": { "query": { "match{{}}_all": {}}, "size": "{{my_size}}" } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/template/20_search.yaml 
b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/20_search.yaml similarity index 100% rename from rest-api-spec/src/main/resources/rest-api-spec/test/template/20_search.yaml rename to modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/20_search.yaml diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/template/30_render_search_template.yaml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/30_render_search_template.yaml similarity index 100% rename from rest-api-spec/src/main/resources/rest-api-spec/test/template/30_render_search_template.yaml rename to modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/30_render_search_template.yaml diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/30_template_query_execution.yaml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/30_template_query_execution.yaml similarity index 100% rename from rest-api-spec/src/main/resources/rest-api-spec/test/search/30_template_query_execution.yaml rename to modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/30_template_query_execution.yaml diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/40_search_request_template.yaml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/40_search_request_template.yaml similarity index 100% rename from rest-api-spec/src/main/resources/rest-api-spec/test/search/40_search_request_template.yaml rename to modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/40_search_request_template.yaml diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yaml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_msearch.yaml similarity index 99% rename from rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yaml rename to 
modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_msearch.yaml index 49e34fb16cd..205070be13e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yaml +++ b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_msearch.yaml @@ -51,3 +51,4 @@ - query: { "template": { "query": { "term": { "foo": { "value": "{{template}}" } } }, "params": { "template": "bar" } } } - match: { responses.0.hits.total: 1 } + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/template/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/template/10_basic.yaml deleted file mode 100644 index bd1fd436648..00000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/template/10_basic.yaml +++ /dev/null @@ -1,56 +0,0 @@ ---- -"Indexed template": - - - do: - put_template: - id: "1" - body: { "template": { "query": { "match_all": {}}, "size": "{{my_size}}" } } - - match: { _id: "1" } - - - do: - get_template: - id: 1 - - match: { found: true } - - match: { lang: mustache } - - match: { _id: "1" } - - match: { _version: 1 } - - match: { template: /.*query\S\S\S\Smatch_all.*/ } - - - do: - catch: missing - get_template: - id: 2 - - match: { found: false } - - match: { lang: mustache } - - match: { _id: "2" } - - is_false: _version - - is_false: template - - - do: - delete_template: - id: "1" - - match: { found: true } - - match: { _index: ".scripts" } - - match: { _id: "1" } - - match: { _version: 2} - - - do: - catch: missing - delete_template: - id: "non_existing" - - match: { found: false } - - match: { _index: ".scripts" } - - match: { _id: "non_existing" } - - match: { _version: 1 } - - - do: - catch: request - put_template: - id: "1" - body: { "template": { "query": { "match{{}}_all": {}}, "size": "{{my_size}}" } } - - - do: - catch: /Unable\sto\sparse.*/ - put_template: - id: "1" - body: { "template": { "query": { "match{{}}_all": {}}, "size": "{{my_size}}" } } From 
b7fb0824beb7531db247c8defdc190b730a839dc Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 8 Dec 2015 21:29:33 -0800 Subject: [PATCH 35/57] Fix render search template tests --- .../config/scripts/file_template_1.mustache | 1 - .../tests/RenderSearchTemplateTests.java | 19 +++++++++++++++---- 2 files changed, 15 insertions(+), 5 deletions(-) delete mode 100644 core/src/test/resources/org/elasticsearch/validate/config/scripts/file_template_1.mustache diff --git a/core/src/test/resources/org/elasticsearch/validate/config/scripts/file_template_1.mustache b/core/src/test/resources/org/elasticsearch/validate/config/scripts/file_template_1.mustache deleted file mode 100644 index 969dc8d5987..00000000000 --- a/core/src/test/resources/org/elasticsearch/validate/config/scripts/file_template_1.mustache +++ /dev/null @@ -1 +0,0 @@ -{"size":"{{size}}","query":{"match":{"foo":"{{value}}"}},"aggs":{"objects":{"terms":{"field":"{{value}}","size":"{{size}}"}}}} \ No newline at end of file diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java index 84994096fa6..87cc51c2ec2 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.messy.tests; import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateResponse; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; @@ -31,7 +32,10 @@ import org.elasticsearch.script.Template; import 
org.elasticsearch.script.mustache.MustachePlugin; import org.elasticsearch.script.mustache.MustacheScriptEngineService; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.rest.support.FileUtils; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -41,7 +45,7 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; -@ESIntegTestCase.SuiteScopeTestCase @ESIntegTestCase.AwaitsFix(bugUrl = "nopush") +@ESIntegTestCase.SuiteScopeTestCase public class RenderSearchTemplateTests extends ESIntegTestCase { private static final String TEMPLATE_CONTENTS = "{\"size\":\"{{size}}\",\"query\":{\"match\":{\"foo\":\"{{value}}\"}},\"aggs\":{\"objects\":{\"terms\":{\"field\":\"{{value}}\",\"size\":\"{{size}}\"}}}}"; @@ -49,7 +53,7 @@ public class RenderSearchTemplateTests extends ESIntegTestCase { protected Collection> nodePlugins() { return Collections.singleton(MustachePlugin.class); } - + @Override protected void setupSuiteScopeCluster() throws Exception { client().preparePutIndexedScript(MustacheScriptEngineService.NAME, "index_template_1", "{ \"template\": " + TEMPLATE_CONTENTS + " }").get(); @@ -57,9 +61,16 @@ public class RenderSearchTemplateTests extends ESIntegTestCase { @Override public Settings nodeSettings(int nodeOrdinal) { - //Set path so ScriptService will pick up the test scripts + Path configDir = createTempDir(); + Path scriptsDir = configDir.resolve("scripts"); + try { + Files.createDirectories(scriptsDir); + Files.write(scriptsDir.resolve("file_template_1.mustache"), TEMPLATE_CONTENTS.getBytes("UTF-8")); + } catch (Exception e) { + throw new RuntimeException(e); + } return settingsBuilder().put(super.nodeSettings(nodeOrdinal)) - .put("path.conf", this.getDataPath("config")).build(); + .put("path.conf", configDir).build(); } public void 
testInlineTemplate() { From 74dc5bf20a78a031b046d03ae352fb9f104188cd Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 8 Dec 2015 21:33:19 -0800 Subject: [PATCH 36/57] Fix template query parser tests to register mustache script engine --- .../messy/tests/TemplateQueryParserTests.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java index 940f4e7d134..29213f0ac0e 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java @@ -59,6 +59,7 @@ import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.mustache.MustacheScriptEngineService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.threadpool.ThreadPool; @@ -76,7 +77,6 @@ import static org.hamcrest.Matchers.containsString; * Test parsing and executing a template request. */ // NOTE: this can't be migrated to ESSingleNodeTestCase because of the custom path.conf -@ESTestCase.AwaitsFix(bugUrl = "nopush") public class TemplateQueryParserTests extends ESTestCase { private Injector injector; @@ -97,6 +97,9 @@ public class TemplateQueryParserTests extends ESTestCase { }); Index index = new Index("test"); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings); + ScriptModule scriptModule = new ScriptModule(settings); + // TODO: make this use a mock engine instead of mustache and it will no longer be messy! 
+ scriptModule.addScriptEngine(MustacheScriptEngineService.class); injector = new ModulesBuilder().add( new EnvironmentModule(new Environment(settings)), new SettingsModule(settings, new SettingsFilter(settings)), @@ -108,7 +111,7 @@ public class TemplateQueryParserTests extends ESTestCase { bindQueryParsersExtension(); } }, - new ScriptModule(settings), + scriptModule, new IndexSettingsModule(index, settings), new AbstractModule() { @Override From 1909c3d8cc5a82a97fd472c07c5a96712e3f7e21 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Tue, 8 Dec 2015 10:21:33 +0100 Subject: [PATCH 37/57] Only text fields should accept analyzer and term vector settings. Currently almost all our fields accept the `analyzer` and `term_vector` settings although they only make sense on text fields. This commit forbids those settings on all fields but `string` and `_all` for indices created on or after version 2.2.0. --- .../index/mapper/core/StringFieldMapper.java | 4 +- .../index/mapper/core/TypeParsers.java | 109 +++++++++++------- .../index/mapper/internal/AllFieldMapper.java | 4 +- .../mapper/numeric/SimpleNumericTests.java | 62 ++++++++++ 4 files changed, 134 insertions(+), 45 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java index a5c681d59a5..0a921ad85eb 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java @@ -46,7 +46,7 @@ import java.util.Map; import static org.apache.lucene.index.IndexOptions.NONE; import static org.elasticsearch.index.mapper.MapperBuilders.stringField; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseField; +import static org.elasticsearch.index.mapper.core.TypeParsers.parseTextField; import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField; public class 
StringFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll { @@ -159,7 +159,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc @Override public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { StringFieldMapper.Builder builder = stringField(name); - parseField(builder, name, node, parserContext); + parseTextField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); String propName = Strings.toUnderscoreCase(entry.getKey()); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java index a3938a48a5b..e530243657c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java @@ -182,9 +182,72 @@ public class TypeParsers { } } - public static void parseField(FieldMapper.Builder builder, String name, Map fieldNode, Mapper.TypeParser.ParserContext parserContext) { + private static void parseAnalyzersAndTermVectors(FieldMapper.Builder builder, String name, Map fieldNode, Mapper.TypeParser.ParserContext parserContext) { NamedAnalyzer indexAnalyzer = builder.fieldType().indexAnalyzer(); NamedAnalyzer searchAnalyzer = builder.fieldType().searchAnalyzer(); + + for (Iterator> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) { + Map.Entry entry = iterator.next(); + final String propName = Strings.toUnderscoreCase(entry.getKey()); + final Object propNode = entry.getValue(); + if (propName.equals("term_vector")) { + parseTermVector(name, propNode.toString(), builder); + iterator.remove(); + } else if (propName.equals("store_term_vectors")) { + builder.storeTermVectors(nodeBooleanValue(propNode)); + iterator.remove(); + } else if 
(propName.equals("store_term_vector_offsets")) { + builder.storeTermVectorOffsets(nodeBooleanValue(propNode)); + iterator.remove(); + } else if (propName.equals("store_term_vector_positions")) { + builder.storeTermVectorPositions(nodeBooleanValue(propNode)); + iterator.remove(); + } else if (propName.equals("store_term_vector_payloads")) { + builder.storeTermVectorPayloads(nodeBooleanValue(propNode)); + iterator.remove(); + } else if (propName.equals("analyzer") || // for backcompat, reading old indexes, remove for v3.0 + propName.equals("index_analyzer") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { + + NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString()); + if (analyzer == null) { + throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]"); + } + indexAnalyzer = analyzer; + iterator.remove(); + } else if (propName.equals("search_analyzer")) { + NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString()); + if (analyzer == null) { + throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]"); + } + searchAnalyzer = analyzer; + iterator.remove(); + } + } + + if (indexAnalyzer == null) { + if (searchAnalyzer != null) { + throw new MapperParsingException("analyzer on field [" + name + "] must be set when search_analyzer is set"); + } + } else if (searchAnalyzer == null) { + searchAnalyzer = indexAnalyzer; + } + builder.indexAnalyzer(indexAnalyzer); + builder.searchAnalyzer(searchAnalyzer); + } + + /** + * Parse text field attributes. In addition to {@link #parseField common attributes} + * this will parse analysis and term-vectors related settings. 
+ */ + public static void parseTextField(FieldMapper.Builder builder, String name, Map fieldNode, Mapper.TypeParser.ParserContext parserContext) { + parseField(builder, name, fieldNode, parserContext); + parseAnalyzersAndTermVectors(builder, name, fieldNode, parserContext); + } + + /** + * Parse common field attributes such as {@code doc_values} or {@code store}. + */ + public static void parseField(FieldMapper.Builder builder, String name, Map fieldNode, Mapper.TypeParser.ParserContext parserContext) { Version indexVersionCreated = parserContext.indexVersionCreated(); for (Iterator> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); @@ -202,24 +265,9 @@ public class TypeParsers { } else if (propName.equals(DOC_VALUES)) { builder.docValues(nodeBooleanValue(propNode)); iterator.remove(); - } else if (propName.equals("term_vector")) { - parseTermVector(name, propNode.toString(), builder); - iterator.remove(); } else if (propName.equals("boost")) { builder.boost(nodeFloatValue(propNode)); iterator.remove(); - } else if (propName.equals("store_term_vectors")) { - builder.storeTermVectors(nodeBooleanValue(propNode)); - iterator.remove(); - } else if (propName.equals("store_term_vector_offsets")) { - builder.storeTermVectorOffsets(nodeBooleanValue(propNode)); - iterator.remove(); - } else if (propName.equals("store_term_vector_positions")) { - builder.storeTermVectorPositions(nodeBooleanValue(propNode)); - iterator.remove(); - } else if (propName.equals("store_term_vector_payloads")) { - builder.storeTermVectorPayloads(nodeBooleanValue(propNode)); - iterator.remove(); } else if (propName.equals("omit_norms")) { builder.omitNorms(nodeBooleanValue(propNode)); iterator.remove(); @@ -250,22 +298,6 @@ public class TypeParsers { } else if (propName.equals("index_options")) { builder.indexOptions(nodeIndexOptionValue(propNode)); iterator.remove(); - } else if (propName.equals("analyzer") || // for backcompat, reading old 
indexes, remove for v3.0 - propName.equals("index_analyzer") && indexVersionCreated.before(Version.V_2_0_0_beta1)) { - - NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString()); - if (analyzer == null) { - throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]"); - } - indexAnalyzer = analyzer; - iterator.remove(); - } else if (propName.equals("search_analyzer")) { - NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString()); - if (analyzer == null) { - throw new MapperParsingException("analyzer [" + propNode.toString() + "] not found for field [" + name + "]"); - } - searchAnalyzer = analyzer; - iterator.remove(); } else if (propName.equals("include_in_all")) { builder.includeInAll(nodeBooleanValue(propNode)); iterator.remove(); @@ -296,16 +328,11 @@ public class TypeParsers { iterator.remove(); } } - - if (indexAnalyzer == null) { - if (searchAnalyzer != null) { - throw new MapperParsingException("analyzer on field [" + name + "] must be set when search_analyzer is set"); - } - } else if (searchAnalyzer == null) { - searchAnalyzer = indexAnalyzer; + if (indexVersionCreated.before(Version.V_2_2_0)) { + // analyzer, search_analyzer, term_vectors were accepted on all fields + // before 2.2, even though it made little sense + parseAnalyzersAndTermVectors(builder, name, fieldNode, parserContext); } - builder.indexAnalyzer(indexAnalyzer); - builder.searchAnalyzer(searchAnalyzer); } public static boolean parseMultiField(FieldMapper.Builder builder, String name, Mapper.TypeParser.ParserContext parserContext, String propName, Object propNode) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java index 3166a683397..645c36a4855 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java +++ 
b/core/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java @@ -49,7 +49,7 @@ import java.util.Map; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeMapValue; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseField; +import static org.elasticsearch.index.mapper.core.TypeParsers.parseTextField; /** * @@ -134,7 +134,7 @@ public class AllFieldMapper extends MetadataFieldMapper { } } - parseField(builder, builder.name, node, parserContext); + parseTextField(builder, builder.name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); String fieldName = Strings.toUnderscoreCase(entry.getKey()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java index de2957cae34..d93ae9b6787 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java @@ -24,6 +24,8 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; @@ -41,9 +43,11 @@ import org.elasticsearch.index.mapper.string.SimpleStringMappingTests; import org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; +import java.util.Arrays; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -510,4 +514,62 @@ public class SimpleNumericTests extends ESSingleNodeTestCase { assertThat(ts, instanceOf(NumericTokenStream.class)); assertEquals(expected, ((NumericTokenStream)ts).getPrecisionStep()); } + + public void testTermVectorsBackCompat() throws Exception { + for (String type : Arrays.asList("byte", "short", "integer", "long", "float", "double")) { + doTestTermVectorsBackCompat(type); + } + } + + private void doTestTermVectorsBackCompat(String type) throws Exception { + DocumentMapperParser parser = createIndex("index-" + type).mapperService().documentMapperParser(); + String mappingWithTV = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("foo") + .field("type", type) + .field("term_vector", "yes") + .endObject() + .endObject().endObject().endObject().string(); + try { + parser.parse(mappingWithTV); + fail(); + } catch (MapperParsingException e) { + assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [term_vector : yes]")); + } + + Settings oldIndexSettings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0) + .build(); + parser = createIndex("index2-" + type, oldIndexSettings).mapperService().documentMapperParser(); + parser.parse(mappingWithTV); // no exception + } + + public void testAnalyzerBackCompat() throws Exception { + for (String type : Arrays.asList("byte", "short", "integer", "long", "float", "double")) { + doTestAnalyzerBackCompat(type); + } + } + + private void doTestAnalyzerBackCompat(String type) throws Exception { + DocumentMapperParser parser = createIndex("index-" + type).mapperService().documentMapperParser(); + String mappingWithTV 
= XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("foo") + .field("type", type) + .field("analyzer", "keyword") + .endObject() + .endObject().endObject().endObject().string(); + try { + parser.parse(mappingWithTV); + fail(); + } catch (MapperParsingException e) { + assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [analyzer : keyword]")); + } + + Settings oldIndexSettings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0) + .build(); + parser = createIndex("index2-" + type, oldIndexSettings).mapperService().documentMapperParser(); + parser.parse(mappingWithTV); // no exception + } } From ccd0543172da4cb4883807c58bb5fae57de3dd39 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Tue, 8 Dec 2015 16:48:00 +0100 Subject: [PATCH 38/57] CancellableThreads should also treat ThreadInterruptedException as InterruptedException RecoverySource uses the RateLimiter under a cancelable thread. The SimpleRateLimiter used in throws ThreadInterruptedException on interruption. 
We should treat it as InterruptedException --- .../java/org/elasticsearch/common/util/CancellableThreads.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/common/util/CancellableThreads.java b/core/src/main/java/org/elasticsearch/common/util/CancellableThreads.java index b8c5ba09b9c..a605d66e80d 100644 --- a/core/src/main/java/org/elasticsearch/common/util/CancellableThreads.java +++ b/core/src/main/java/org/elasticsearch/common/util/CancellableThreads.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.common.util; +import org.apache.lucene.util.ThreadInterruptedException; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.Nullable; @@ -84,7 +85,7 @@ public class CancellableThreads { RuntimeException throwable = null; try { interruptable.run(); - } catch (InterruptedException e) { + } catch (InterruptedException | ThreadInterruptedException e) { // assume this is us and ignore } catch (RuntimeException t) { throwable = t; From cb84b1ff1ad1da54467ca85dd03fc772c28b19f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 2 Dec 2015 17:28:09 +0100 Subject: [PATCH 39/57] Make HighlightBuilder produce SearchContextHighlight The HighlightBuilder should be able to produce a SearchContextHighlight object which contains the merged global and field options, also contains objects that can only be created on the index shard (like the actual lucene Query object used during highlighting). This is done by the build() method of the HighlightBuilder. Also adding tests that make sure the produced SearchContextHighlight is similar to the one we would get when parsing the xContent directly with the current HighlightParseElement.
--- .../highlight/AbstractHighlighterBuilder.java | 8 +- .../search/highlight/HighlightBuilder.java | 153 +++++++++++++++++- .../highlight/HighlighterParseElement.java | 45 +----- .../highlight/SearchContextHighlight.java | 4 + .../highlight/HighlightBuilderTests.java | 96 ++++++++--- 5 files changed, 233 insertions(+), 73 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java b/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java index e1818053596..d30144f777f 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java @@ -125,7 +125,7 @@ public abstract class AbstractHighlighterBuilderfvh should provide highlighting on filter clauses */ + public static final boolean DEFAULT_HIGHLIGHT_FILTER = false; + /** default for highlight fragments being ordered by score */ + public static final boolean DEFAULT_SCORE_ORDERED = false; + /** the default encoder setting */ + public static final String DEFAULT_ENCODER = "default"; + /** default for the maximum number of phrases the fvh will consider */ + public static final int DEFAULT_PHRASE_LIMIT = 256; + /** default for fragment size when there are no matches */ + public static final int DEFAULT_NO_MATCH_SIZE = 0; + /** the default number of fragments for highlighting */ + public static final int DEFAULT_NUMBER_OF_FRAGMENTS = 5; + /** the default number of fragments size in characters */ + public static final int DEFAULT_FRAGMENT_CHAR_SIZE = 100; + /** the default opening tag */ + public static final String[] DEFAULT_PRE_TAGS = new String[]{""}; + /** the default closing tag */ + public static final String[] DEFAULT_POST_TAGS = new String[]{""}; + + /** the default opening tags when tag_schema = "styled" */ + public static final String[] STYLED_PRE_TAG = { + "", "", "", + "", "", "", + "", "", "", + "" + }; + 
/** the default closing tags when tag_schema = "styled" */ + public static final String[] STYLED_POST_TAGS = {""}; + + /** + * a {@link FieldOptions.Builder} with default settings + */ + public final static Builder defaultFieldOptions() { + return new SearchContextHighlight.FieldOptions.Builder() + .preTags(DEFAULT_PRE_TAGS).postTags(DEFAULT_POST_TAGS).scoreOrdered(DEFAULT_SCORE_ORDERED).highlightFilter(DEFAULT_HIGHLIGHT_FILTER) + .requireFieldMatch(DEFAULT_REQUIRE_FIELD_MATCH).forceSource(DEFAULT_FORCE_SOURCE).fragmentCharSize(DEFAULT_FRAGMENT_CHAR_SIZE).numberOfFragments(DEFAULT_NUMBER_OF_FRAGMENTS) + .encoder(DEFAULT_ENCODER).boundaryMaxScan(SimpleBoundaryScanner.DEFAULT_MAX_SCAN) + .boundaryChars(SimpleBoundaryScanner.DEFAULT_BOUNDARY_CHARS) + .noMatchSize(DEFAULT_NO_MATCH_SIZE).phraseLimit(DEFAULT_PHRASE_LIMIT); + } + private final List fields = new ArrayList<>(); private String encoder; @@ -120,12 +177,12 @@ public class HighlightBuilder extends AbstractHighlighterBuilder fieldOptions = new ArrayList<>(); + for (Field field : this.fields) { + final SearchContextHighlight.FieldOptions.Builder fieldOptionsBuilder = new SearchContextHighlight.FieldOptions.Builder(); + fieldOptionsBuilder.fragmentOffset(field.fragmentOffset); + if (field.matchedFields != null) { + Set matchedFields = new HashSet(field.matchedFields.length); + Collections.addAll(matchedFields, field.matchedFields); + fieldOptionsBuilder.matchedFields(matchedFields); + } + transferOptions(field, fieldOptionsBuilder, context); + fieldOptions.add(new SearchContextHighlight.Field(field.name(), fieldOptionsBuilder.merge(globalOptionsBuilder.build()).build())); + } + return new SearchContextHighlight(fieldOptions); + } + + /** + * Transfers field options present in the input {@link AbstractHighlighterBuilder} to the receiving + * {@link FieldOptions.Builder}, effectively overwriting existing settings + * @param targetOptionsBuilder the receiving options builder + * @param highlighterBuilder highlight 
builder with the input options + * @param context needed to convert {@link QueryBuilder} to {@link Query} + * @throws IOException on errors parsing any optional nested highlight query + */ + @SuppressWarnings({ "rawtypes", "unchecked" }) + private static void transferOptions(AbstractHighlighterBuilder highlighterBuilder, SearchContextHighlight.FieldOptions.Builder targetOptionsBuilder, QueryShardContext context) throws IOException { + targetOptionsBuilder.preTags(highlighterBuilder.preTags); + targetOptionsBuilder.postTags(highlighterBuilder.postTags); + targetOptionsBuilder.scoreOrdered("score".equals(highlighterBuilder.order)); + if (highlighterBuilder.highlightFilter != null) { + targetOptionsBuilder.highlightFilter(highlighterBuilder.highlightFilter); + } + if (highlighterBuilder.fragmentSize != null) { + targetOptionsBuilder.fragmentCharSize(highlighterBuilder.fragmentSize); + } + if (highlighterBuilder.numOfFragments != null) { + targetOptionsBuilder.numberOfFragments(highlighterBuilder.numOfFragments); + } + if (highlighterBuilder.requireFieldMatch != null) { + targetOptionsBuilder.requireFieldMatch(highlighterBuilder.requireFieldMatch); + } + if (highlighterBuilder.boundaryMaxScan != null) { + targetOptionsBuilder.boundaryMaxScan(highlighterBuilder.boundaryMaxScan); + } + targetOptionsBuilder.boundaryChars(convertCharArray(highlighterBuilder.boundaryChars)); + targetOptionsBuilder.highlighterType(highlighterBuilder.highlighterType); + targetOptionsBuilder.fragmenter(highlighterBuilder.fragmenter); + if (highlighterBuilder.noMatchSize != null) { + targetOptionsBuilder.noMatchSize(highlighterBuilder.noMatchSize); + } + if (highlighterBuilder.forceSource != null) { + targetOptionsBuilder.forceSource(highlighterBuilder.forceSource); + } + if (highlighterBuilder.phraseLimit != null) { + targetOptionsBuilder.phraseLimit(highlighterBuilder.phraseLimit); + } + targetOptionsBuilder.options(highlighterBuilder.options); + if (highlighterBuilder.highlightQuery != null) 
{ + targetOptionsBuilder.highlightQuery(highlighterBuilder.highlightQuery.toQuery(context)); + } + } + + private static Character[] convertCharArray(char[] array) { + if (array == null) { + return null; + } + Character[] charArray = new Character[array.length]; + for (int i = 0; i < array.length; i++) { + charArray[i] = array[i]; + } + return charArray; + } public void innerXContent(XContentBuilder builder) throws IOException { // first write common options diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java b/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java index 298a6670582..15963267d35 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.highlight; -import org.apache.lucene.search.vectorhighlight.SimpleBoundaryScanner; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryShardContext; @@ -52,39 +51,6 @@ import java.util.Set; */ public class HighlighterParseElement implements SearchParseElement { - /** default for whether to highlight fields based on the source even if stored separately */ - public static final boolean DEFAULT_FORCE_SOURCE = false; - /** default for whether a field should be highlighted only if a query matches that field */ - public static final boolean DEFAULT_REQUIRE_FIELD_MATCH = true; - /** default for whether fvh should provide highlighting on filter clauses */ - public static final boolean DEFAULT_HIGHLIGHT_FILTER = false; - /** default for highlight fragments being ordered by score */ - public static final boolean DEFAULT_SCORE_ORDERED = false; - /** the default encoder setting */ - public static final String DEFAULT_ENCODER = "default"; - /** default for the maximum number of phrases the fvh 
will consider */ - public static final int DEFAULT_PHRASE_LIMIT = 256; - /** default for fragment size when there are no matches */ - public static final int DEFAULT_NO_MATCH_SIZE = 0; - /** the default number of fragments for highlighting */ - public static final int DEFAULT_NUMBER_OF_FRAGMENTS = 5; - /** the default number of fragments size in characters */ - public static final int DEFAULT_FRAGMENT_CHAR_SIZE = 100; - /** the default opening tag */ - public static final String[] DEFAULT_PRE_TAGS = new String[]{""}; - /** the default closing tag */ - public static final String[] DEFAULT_POST_TAGS = new String[]{""}; - - /** the default opening tags when tag_schema = "styled" */ - public static final String[] STYLED_PRE_TAG = { - "", "", "", - "", "", "", - "", "", "", - "" - }; - /** the default closing tags when tag_schema = "styled" */ - public static final String[] STYLED_POST_TAGS = {""}; - @Override public void parse(XContentParser parser, SearchContext context) throws Exception { try { @@ -99,12 +65,7 @@ public class HighlighterParseElement implements SearchParseElement { String topLevelFieldName = null; final List> fieldsOptions = new ArrayList<>(); - final SearchContextHighlight.FieldOptions.Builder globalOptionsBuilder = new SearchContextHighlight.FieldOptions.Builder() - .preTags(DEFAULT_PRE_TAGS).postTags(DEFAULT_POST_TAGS).scoreOrdered(DEFAULT_SCORE_ORDERED).highlightFilter(DEFAULT_HIGHLIGHT_FILTER) - .requireFieldMatch(DEFAULT_REQUIRE_FIELD_MATCH).forceSource(DEFAULT_FORCE_SOURCE).fragmentCharSize(DEFAULT_FRAGMENT_CHAR_SIZE).numberOfFragments(DEFAULT_NUMBER_OF_FRAGMENTS) - .encoder(DEFAULT_ENCODER).boundaryMaxScan(SimpleBoundaryScanner.DEFAULT_MAX_SCAN) - .boundaryChars(SimpleBoundaryScanner.DEFAULT_BOUNDARY_CHARS) - .noMatchSize(DEFAULT_NO_MATCH_SIZE).phraseLimit(DEFAULT_PHRASE_LIMIT); + final SearchContextHighlight.FieldOptions.Builder globalOptionsBuilder = HighlightBuilder.defaultFieldOptions(); while ((token = parser.nextToken()) != 
XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { @@ -147,8 +108,8 @@ public class HighlighterParseElement implements SearchParseElement { } else if ("tags_schema".equals(topLevelFieldName) || "tagsSchema".equals(topLevelFieldName)) { String schema = parser.text(); if ("styled".equals(schema)) { - globalOptionsBuilder.preTags(STYLED_PRE_TAG); - globalOptionsBuilder.postTags(STYLED_POST_TAGS); + globalOptionsBuilder.preTags(HighlightBuilder.STYLED_PRE_TAG); + globalOptionsBuilder.postTags(HighlightBuilder.STYLED_POST_TAGS); } } else if ("highlight_filter".equals(topLevelFieldName) || "highlightFilter".equals(topLevelFieldName)) { globalOptionsBuilder.highlightFilter(parser.booleanValue()); diff --git a/core/src/main/java/org/elasticsearch/search/highlight/SearchContextHighlight.java b/core/src/main/java/org/elasticsearch/search/highlight/SearchContextHighlight.java index 38a8147b105..293143fb1db 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/SearchContextHighlight.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/SearchContextHighlight.java @@ -53,6 +53,10 @@ public class SearchContextHighlight { this.globalForceSource = globalForceSource; } + boolean globalForceSource() { + return this.globalForceSource; + } + public boolean forceSource(Field field) { if (globalForceSource) { return true; diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java index 8d6edff5c44..75c953b44f4 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java @@ -21,6 +21,8 @@ package org.elasticsearch.search.highlight; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.Version; +import 
org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -32,18 +34,21 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.query.IdsQueryBuilder; -import org.elasticsearch.index.query.IdsQueryParser; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.MatchAllQueryParser; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParser; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.TermQueryBuilder; -import org.elasticsearch.index.query.TermQueryParser; import org.elasticsearch.indices.query.IndicesQueriesRegistry; +import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.highlight.HighlightBuilder.Field; +import org.elasticsearch.search.highlight.SearchContextHighlight.FieldOptions; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.IndexSettingsModule; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -51,6 +56,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; @@ -73,8 +79,6 @@ public class HighlightBuilderTests extends ESTestCase { @SuppressWarnings("rawtypes") Set injectedQueryParsers = new HashSet<>(); injectedQueryParsers.add(new MatchAllQueryParser()); - 
injectedQueryParsers.add(new IdsQueryParser()); - injectedQueryParsers.add(new TermQueryParser()); indicesQueriesRegistry = new IndicesQueriesRegistry(Settings.settingsBuilder().build(), injectedQueryParsers, namedWriteableRegistry); } @@ -128,7 +132,7 @@ public class HighlightBuilderTests extends ESTestCase { } /** - * Generic test that creates new highlighter from the test highlighter and checks both for equality + * creates random highlighter, renders it to xContent and back to new instance that should be equal to original */ public void testFromXContent() throws IOException { QueryParseContext context = new QueryParseContext(indicesQueriesRegistry); @@ -261,6 +265,63 @@ public class HighlightBuilderTests extends ESTestCase { } catch (ParsingException e) { assertEquals("cannot parse object with name [bad_fieldname]", e.getMessage()); } + } + + /** + * test that build() outputs a {@link SearchContextHighlight} that is similar to the one + * we would get when parsing the xContent the test highlight builder is rendering out + */ + public void testBuildSearchContextHighlight() throws IOException { + Settings indexSettings = Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); + Index index = new Index(randomAsciiOfLengthBetween(1, 10)); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings); + // shard context will only need indicesQueriesRegistry for building Query objects nested in highlighter + QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, null, null, null, indicesQueriesRegistry); + + for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { + HighlightBuilder highlightBuilder = randomHighlighterBuilder(); + SearchContextHighlight highlight = highlightBuilder.build(mockShardContext); + XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + if (randomBoolean()) { + builder.prettyPrint(); + } + 
builder.startObject(); + highlightBuilder.innerXContent(builder); + builder.endObject(); + XContentParser parser = XContentHelper.createParser(builder.bytes()); + + SearchContextHighlight parsedHighlight = new HighlighterParseElement().parse(parser, mockShardContext); + assertNotSame(highlight, parsedHighlight); + assertEquals(highlight.globalForceSource(), parsedHighlight.globalForceSource()); + assertEquals(highlight.fields().size(), parsedHighlight.fields().size()); + + Iterator iterator = parsedHighlight.fields().iterator(); + for (org.elasticsearch.search.highlight.SearchContextHighlight.Field field : highlight.fields()) { + org.elasticsearch.search.highlight.SearchContextHighlight.Field otherField = iterator.next(); + assertEquals(field.field(), otherField.field()); + FieldOptions options = field.fieldOptions(); + FieldOptions otherOptions = otherField.fieldOptions(); + assertArrayEquals(options.boundaryChars(), options.boundaryChars()); + assertEquals(options.boundaryMaxScan(), otherOptions.boundaryMaxScan()); + assertEquals(options.encoder(), otherOptions.encoder()); + assertEquals(options.fragmentCharSize(), otherOptions.fragmentCharSize()); + assertEquals(options.fragmenter(), otherOptions.fragmenter()); + assertEquals(options.fragmentOffset(), otherOptions.fragmentOffset()); + assertEquals(options.highlighterType(), otherOptions.highlighterType()); + assertEquals(options.highlightFilter(), otherOptions.highlightFilter()); + assertEquals(options.highlightQuery(), otherOptions.highlightQuery()); + assertEquals(options.matchedFields(), otherOptions.matchedFields()); + assertEquals(options.noMatchSize(), otherOptions.noMatchSize()); + assertEquals(options.numberOfFragments(), otherOptions.numberOfFragments()); + assertEquals(options.options(), otherOptions.options()); + assertEquals(options.phraseLimit(), otherOptions.phraseLimit()); + assertArrayEquals(options.preTags(), otherOptions.preTags()); + assertArrayEquals(options.postTags(), 
otherOptions.postTags()); + assertEquals(options.requireFieldMatch(), otherOptions.requireFieldMatch()); + assertEquals(options.scoreOrdered(), otherOptions.scoreOrdered()); + } + } } /** @@ -277,9 +338,9 @@ public class HighlightBuilderTests extends ESTestCase { context.reset(parser); HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(context); - assertArrayEquals("setting tags_schema 'styled' should alter pre_tags", HighlighterParseElement.STYLED_PRE_TAG, + assertArrayEquals("setting tags_schema 'styled' should alter pre_tags", HighlightBuilder.STYLED_PRE_TAG, highlightBuilder.preTags()); - assertArrayEquals("setting tags_schema 'styled' should alter post_tags", HighlighterParseElement.STYLED_POST_TAGS, + assertArrayEquals("setting tags_schema 'styled' should alter post_tags", HighlightBuilder.STYLED_POST_TAGS, highlightBuilder.postTags()); highlightElement = "{\n" + @@ -289,9 +350,9 @@ public class HighlightBuilderTests extends ESTestCase { context.reset(parser); highlightBuilder = HighlightBuilder.fromXContent(context); - assertArrayEquals("setting tags_schema 'default' should alter pre_tags", HighlighterParseElement.DEFAULT_PRE_TAGS, + assertArrayEquals("setting tags_schema 'default' should alter pre_tags", HighlightBuilder.DEFAULT_PRE_TAGS, highlightBuilder.preTags()); - assertArrayEquals("setting tags_schema 'default' should alter post_tags", HighlighterParseElement.DEFAULT_POST_TAGS, + assertArrayEquals("setting tags_schema 'default' should alter post_tags", HighlightBuilder.DEFAULT_POST_TAGS, highlightBuilder.postTags()); highlightElement = "{\n" + @@ -362,20 +423,9 @@ public class HighlightBuilderTests extends ESTestCase { highlightBuilder.fragmenter(randomAsciiOfLengthBetween(1, 10)); } if (randomBoolean()) { - QueryBuilder highlightQuery; - switch (randomInt(2)) { - case 0: - highlightQuery = new MatchAllQueryBuilder(); - break; - case 1: - highlightQuery = new IdsQueryBuilder(); - break; - default: - case 2: - highlightQuery = new 
TermQueryBuilder(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)); - break; - } + QueryBuilder highlightQuery = new MatchAllQueryBuilder(); highlightQuery.boost((float) randomDoubleBetween(0, 10, false)); + highlightQuery.queryName(randomAsciiOfLength(10)); highlightBuilder.highlightQuery(highlightQuery); } if (randomBoolean()) { From 3e47f904601e2b0b3373bd3889f80a9800291998 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 9 Dec 2015 14:56:50 +0100 Subject: [PATCH 40/57] Remove XContentParser.estimatedNumberType(). The goal of this method is to know whether the xcontent impl knows how to differenciate floats from doubles or longs from ints or if it's just guessing. However, all implementations return true (which is correct for yaml and json, but cbor and smile should be able to differenciate). I first tried to implement this method correctly but it raised many issues because eg. most impls write a long as an integer when it is small enough. So I suggest that we remove this method and just treat cbor and smile like yaml and json, which is already what is happening today anyway. --- .../common/xcontent/XContentParser.java | 6 ---- .../xcontent/json/JsonXContentParser.java | 5 --- .../index/mapper/DocumentParser.java | 35 ++----------------- 3 files changed, 2 insertions(+), 44 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/XContentParser.java b/core/src/main/java/org/elasticsearch/common/xcontent/XContentParser.java index b68d3e11f14..d647c5f0134 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/XContentParser.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/XContentParser.java @@ -178,12 +178,6 @@ public interface XContentParser extends Releasable { NumberType numberType() throws IOException; - /** - * Is the number type estimated or not (i.e. an int might actually be a long, its just low enough - * to be an int). 
- */ - boolean estimatedNumberType(); - short shortValue(boolean coerce) throws IOException; int intValue(boolean coerce) throws IOException; diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentParser.java b/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentParser.java index 787c28324de..c3aca7626b7 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentParser.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentParser.java @@ -68,11 +68,6 @@ public class JsonXContentParser extends AbstractXContentParser { return convertNumberType(parser.getNumberType()); } - @Override - public boolean estimatedNumberType() { - return true; - } - @Override public String currentName() throws IOException { return parser.getCurrentName(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index ce2cbd4e931..b0ad972d575 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -560,44 +560,13 @@ class DocumentParser implements Closeable { return builder; } else if (token == XContentParser.Token.VALUE_NUMBER) { XContentParser.NumberType numberType = context.parser().numberType(); - if (numberType == XContentParser.NumberType.INT) { - if (context.parser().estimatedNumberType()) { - Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long"); - if (builder == null) { - builder = MapperBuilders.longField(currentFieldName); - } - return builder; - } else { - Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "integer"); - if (builder == null) { - builder = MapperBuilders.integerField(currentFieldName); - } - return builder; - } - } else if (numberType == XContentParser.NumberType.LONG) { + if (numberType == 
XContentParser.NumberType.INT || numberType == XContentParser.NumberType.LONG) { Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long"); if (builder == null) { builder = MapperBuilders.longField(currentFieldName); } return builder; - } else if (numberType == XContentParser.NumberType.FLOAT) { - if (context.parser().estimatedNumberType()) { - Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double"); - if (builder == null) { - // no templates are defined, we use float by default instead of double - // since this is much more space-efficient and should be enough most of - // the time - builder = MapperBuilders.floatField(currentFieldName); - } - return builder; - } else { - Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "float"); - if (builder == null) { - builder = MapperBuilders.floatField(currentFieldName); - } - return builder; - } - } else if (numberType == XContentParser.NumberType.DOUBLE) { + } else if (numberType == XContentParser.NumberType.FLOAT || numberType == XContentParser.NumberType.DOUBLE) { Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double"); if (builder == null) { // no templates are defined, we use float by default instead of double From e4721fd02ad688bf597e254d4a23b2a443b088cd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 9 Dec 2015 18:04:58 +0100 Subject: [PATCH 41/57] Addressing review comments --- .../search/highlight/HighlightBuilder.java | 21 +++------- .../highlight/HighlighterParseElement.java | 4 +- .../highlight/HighlightBuilderTests.java | 38 ++++++++++++++++--- 3 files changed, 40 insertions(+), 23 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java b/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java index 20f016851f2..e45303ccb58 100644 --- 
a/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/HighlightBuilder.java @@ -33,11 +33,8 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.highlight.SearchContextHighlight.FieldOptions; import org.elasticsearch.search.highlight.SearchContextHighlight.FieldOptions.Builder; -import org.elasticsearch.search.internal.SearchContext; - import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -84,14 +81,14 @@ public class HighlightBuilder extends AbstractHighlighterBuilder"}; /** the default opening tags when tag_schema = "styled" */ - public static final String[] STYLED_PRE_TAG = { + public static final String[] DEFAULT_STYLED_PRE_TAG = { "", "", "", "", "", "", "", "", "", "" }; /** the default closing tags when tag_schema = "styled" */ - public static final String[] STYLED_POST_TAGS = {""}; + public static final String[] DEFAULT_STYLED_POST_TAGS = {""}; /** * a {@link FieldOptions.Builder} with default settings @@ -181,8 +178,8 @@ public class HighlightBuilder extends AbstractHighlighterBuilder injectedQueryParsers = new HashSet<>(); injectedQueryParsers.add(new MatchAllQueryParser()); + injectedQueryParsers.add(new IdsQueryParser()); + injectedQueryParsers.add(new TermQueryParser()); indicesQueriesRegistry = new IndicesQueriesRegistry(Settings.settingsBuilder().build(), injectedQueryParsers, namedWriteableRegistry); } @@ -277,7 +287,14 @@ public class HighlightBuilderTests extends ESTestCase { Index index = new Index(randomAsciiOfLengthBetween(1, 10)); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings); // shard context will only need indicesQueriesRegistry for 
building Query objects nested in highlighter - QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, null, null, null, indicesQueriesRegistry); + QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, null, null, null, indicesQueriesRegistry) { + @Override + public MappedFieldType fieldMapper(String name) { + StringFieldMapper.Builder builder = MapperBuilders.stringField(name); + return builder.build(new Mapper.BuilderContext(idxSettings.getSettings(), new ContentPath(1))).fieldType(); + } + }; + mockShardContext.setMapUnmappedFieldAsString(true); for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { HighlightBuilder highlightBuilder = randomHighlighterBuilder(); @@ -338,9 +355,9 @@ public class HighlightBuilderTests extends ESTestCase { context.reset(parser); HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(context); - assertArrayEquals("setting tags_schema 'styled' should alter pre_tags", HighlightBuilder.STYLED_PRE_TAG, + assertArrayEquals("setting tags_schema 'styled' should alter pre_tags", HighlightBuilder.DEFAULT_STYLED_PRE_TAG, highlightBuilder.preTags()); - assertArrayEquals("setting tags_schema 'styled' should alter post_tags", HighlightBuilder.STYLED_POST_TAGS, + assertArrayEquals("setting tags_schema 'styled' should alter post_tags", HighlightBuilder.DEFAULT_STYLED_POST_TAGS, highlightBuilder.postTags()); highlightElement = "{\n" + @@ -423,9 +440,20 @@ public class HighlightBuilderTests extends ESTestCase { highlightBuilder.fragmenter(randomAsciiOfLengthBetween(1, 10)); } if (randomBoolean()) { - QueryBuilder highlightQuery = new MatchAllQueryBuilder(); + QueryBuilder highlightQuery; + switch (randomInt(2)) { + case 0: + highlightQuery = new MatchAllQueryBuilder(); + break; + case 1: + highlightQuery = new IdsQueryBuilder(); + break; + default: + case 2: + highlightQuery = new TermQueryBuilder(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 
10)); + break; + } highlightQuery.boost((float) randomDoubleBetween(0, 10, false)); - highlightQuery.queryName(randomAsciiOfLength(10)); highlightBuilder.highlightQuery(highlightQuery); } if (randomBoolean()) { From bef0bedba9e4d3417205eee7a0601eaf9763a831 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Mon, 7 Dec 2015 17:23:53 +0100 Subject: [PATCH 42/57] Add support to _aliases endpoint to specify multiple indices and aliases in one action Closes #15305 --- .../indices/alias/IndicesAliasesRequest.java | 45 +++++++----------- .../alias/RestIndicesAliasesAction.java | 41 +++++++++++----- .../elasticsearch/aliases/IndexAliasesIT.java | 4 +- docs/reference/indices/aliases.asciidoc | 17 ++++++- .../test/indices.update_aliases/10_basic.yaml | 47 +++++++++++++++++++ 5 files changed, 112 insertions(+), 42 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java index 13b7ee92435..1da26627469 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java @@ -78,19 +78,19 @@ public class IndicesAliasesRequest extends AcknowledgedRequest filter = null; String routing = null; boolean routingSet = false; @@ -90,9 +92,9 @@ public class RestIndicesAliasesAction extends BaseRestHandler { currentFieldName = parser.currentName(); } else if (token.isValue()) { if ("index".equals(currentFieldName)) { - index = parser.text(); + indices = new String[] { parser.text() }; } else if ("alias".equals(currentFieldName)) { - alias = parser.text(); + aliases = new String[] { parser.text() }; } else if ("routing".equals(currentFieldName)) { routing = parser.textOrNull(); routingSet = true; @@ -103,6 +105,23 @@ public class RestIndicesAliasesAction extends BaseRestHandler { searchRouting = 
parser.textOrNull(); searchRoutingSet = true; } + } else if (token == XContentParser.Token.START_ARRAY) { + if ("indices".equals(currentFieldName)) { + List indexNames = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + String index = parser.text(); + indexNames.add(index); + } + indices = indexNames.toArray(new String[indexNames.size()]); + } + if ("aliases".equals(currentFieldName)) { + List aliasNames = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + String alias = parser.text(); + aliasNames.add(alias); + } + aliases = aliasNames.toArray(new String[aliasNames.size()]); + } } else if (token == XContentParser.Token.START_OBJECT) { if ("filter".equals(currentFieldName)) { filter = parser.mapOrdered(); @@ -111,19 +130,19 @@ public class RestIndicesAliasesAction extends BaseRestHandler { } if (type == AliasAction.Type.ADD) { - AliasAction aliasAction = newAddAliasAction(index, alias).filter(filter); + AliasActions aliasActions = new AliasActions(type, indices, aliases); if (routingSet) { - aliasAction.routing(routing); + aliasActions.routing(routing); } if (indexRoutingSet) { - aliasAction.indexRouting(indexRouting); + aliasActions.indexRouting(indexRouting); } if (searchRoutingSet) { - aliasAction.searchRouting(searchRouting); + aliasActions.searchRouting(searchRouting); } - indicesAliasesRequest.addAliasAction(aliasAction); + indicesAliasesRequest.addAliasAction(aliasActions); } else if (type == AliasAction.Type.REMOVE) { - indicesAliasesRequest.removeAlias(index, alias); + indicesAliasesRequest.removeAlias(indices, aliases); } } } diff --git a/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java b/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java index 81b09c8c8e2..3a3876b60b1 100644 --- a/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java +++ b/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java @@ -759,7 +759,7 @@ 
public class IndexAliasesIT extends ESIntegTestCase { admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction("index1", null)).get(); fail("Expected ActionRequestValidationException"); } catch (ActionRequestValidationException e) { - assertThat(e.getMessage(), containsString("requires an [alias] to be set")); + assertThat(e.getMessage(), containsString("[alias] may not be empty string")); } } @@ -768,7 +768,7 @@ public class IndexAliasesIT extends ESIntegTestCase { admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction("index1", "")).get(); fail("Expected ActionRequestValidationException"); } catch (ActionRequestValidationException e) { - assertThat(e.getMessage(), containsString("requires an [alias] to be set")); + assertThat(e.getMessage(), containsString("[alias] may not be empty string")); } } diff --git a/docs/reference/indices/aliases.asciidoc b/docs/reference/indices/aliases.asciidoc index 9a65c89837d..57faa9718f9 100644 --- a/docs/reference/indices/aliases.asciidoc +++ b/docs/reference/indices/aliases.asciidoc @@ -63,7 +63,22 @@ curl -XPOST 'http://localhost:9200/_aliases' -d ' }' -------------------------------------------------- -Alternatively, you can use a glob pattern to associate an alias to +Multiple indices can be specified for an action with the `indices` array syntax: + +[source,js] +-------------------------------------------------- +curl -XPOST 'http://localhost:9200/_aliases' -d ' +{ + "actions" : [ + { "add" : { "indices" : ["test1", "test2"], "alias" : "alias1" } } + ] +}' +-------------------------------------------------- + +To specify multiple aliases in one action, the corresponding `aliases` array +syntax exists as well. 
+ +For the example above, a glob pattern can also be used to associate an alias to more than one index that share a common name: [source,js] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/10_basic.yaml index 5b45f740e44..041f6bb3cc0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/10_basic.yaml @@ -32,3 +32,50 @@ name: test_alias - match: {test_index.aliases.test_alias: {'index_routing': 'routing_value', 'search_routing': 'routing_value'}} + +--- +"Basic test for multiple aliases": + + - do: + indices.create: + index: test_index + + - do: + indices.exists_alias: + name: test_alias1 + + - is_false: '' + + - do: + indices.exists_alias: + name: test_alias2 + + - is_false: '' + + - do: + indices.update_aliases: + body: + actions: + - add: + indices: [test_index] + aliases: [test_alias1, test_alias2] + routing: routing_value + + - do: + indices.exists_alias: + name: test_alias1 + + - is_true: '' + + - do: + indices.exists_alias: + name: test_alias2 + + - is_true: '' + + - do: + indices.get_alias: + index: test_index + + - match: {test_index.aliases.test_alias1: {'index_routing': 'routing_value', 'search_routing': 'routing_value'}} + - match: {test_index.aliases.test_alias2: {'index_routing': 'routing_value', 'search_routing': 'routing_value'}} From f675801b2669ea7414ef0379e1c8b9812660aae5 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Wed, 9 Dec 2015 18:29:31 +0100 Subject: [PATCH 43/57] add more tests to network service Follow up for #15340 We test that bind with wilcard IP + fixed IP it raises an exception We test binding multiple IPs (cherry picked from commit 2cc5bb7) --- .../common/network/NetworkServiceTests.java | 63 +++++++++++++------ 1 file changed, 43 insertions(+), 20 deletions(-) diff --git 
a/core/src/test/java/org/elasticsearch/common/network/NetworkServiceTests.java b/core/src/test/java/org/elasticsearch/common/network/NetworkServiceTests.java index 13c2211bce5..7ec4756d784 100644 --- a/core/src/test/java/org/elasticsearch/common/network/NetworkServiceTests.java +++ b/core/src/test/java/org/elasticsearch/common/network/NetworkServiceTests.java @@ -24,14 +24,16 @@ import org.elasticsearch.test.ESTestCase; import java.net.InetAddress; +import static org.hamcrest.Matchers.is; + /** * Tests for network service... try to keep them safe depending upon configuration * please don't actually bind to anything, just test the addresses. */ public class NetworkServiceTests extends ESTestCase { - /** - * ensure exception if we bind to multicast ipv4 address + /** + * ensure exception if we bind to multicast ipv4 address */ public void testBindMulticastV4() throws Exception { NetworkService service = new NetworkService(Settings.EMPTY); @@ -42,9 +44,8 @@ public class NetworkServiceTests extends ESTestCase { assertTrue(e.getMessage().contains("invalid: multicast")); } } - - /** - * ensure exception if we bind to multicast ipv6 address + /** + * ensure exception if we bind to multicast ipv6 address */ public void testBindMulticastV6() throws Exception { NetworkService service = new NetworkService(Settings.EMPTY); @@ -55,9 +56,9 @@ public class NetworkServiceTests extends ESTestCase { assertTrue(e.getMessage().contains("invalid: multicast")); } } - - /** - * ensure exception if we publish to multicast ipv4 address + + /** + * ensure exception if we publish to multicast ipv4 address */ public void testPublishMulticastV4() throws Exception { NetworkService service = new NetworkService(Settings.EMPTY); @@ -68,9 +69,9 @@ public class NetworkServiceTests extends ESTestCase { assertTrue(e.getMessage().contains("invalid: multicast")); } } - - /** - * ensure exception if we publish to multicast ipv6 address + + /** + * ensure exception if we publish to multicast ipv6 address 
*/ public void testPublishMulticastV6() throws Exception { NetworkService service = new NetworkService(Settings.EMPTY); @@ -82,24 +83,24 @@ public class NetworkServiceTests extends ESTestCase { } } - /** - * ensure specifying wildcard ipv4 address will bind to all interfaces + /** + * ensure specifying wildcard ipv4 address will bind to all interfaces */ public void testBindAnyLocalV4() throws Exception { NetworkService service = new NetworkService(Settings.EMPTY); assertEquals(InetAddress.getByName("0.0.0.0"), service.resolveBindHostAddresses(new String[] { "0.0.0.0" })[0]); } - - /** - * ensure specifying wildcard ipv6 address will bind to all interfaces + + /** + * ensure specifying wildcard ipv6 address will bind to all interfaces */ public void testBindAnyLocalV6() throws Exception { NetworkService service = new NetworkService(Settings.EMPTY); assertEquals(InetAddress.getByName("::"), service.resolveBindHostAddresses(new String[] { "::" })[0]); } - /** - * ensure specifying wildcard ipv4 address selects reasonable publish address + /** + * ensure specifying wildcard ipv4 address selects reasonable publish address */ public void testPublishAnyLocalV4() throws Exception { NetworkService service = new NetworkService(Settings.EMPTY); @@ -107,12 +108,34 @@ public class NetworkServiceTests extends ESTestCase { assertFalse(address.isAnyLocalAddress()); } - /** - * ensure specifying wildcard ipv6 address selects reasonable publish address + /** + * ensure specifying wildcard ipv6 address selects reasonable publish address */ public void testPublishAnyLocalV6() throws Exception { NetworkService service = new NetworkService(Settings.EMPTY); InetAddress address = service.resolvePublishHostAddresses(new String[] { "::" }); assertFalse(address.isAnyLocalAddress()); } + + /** + * ensure we can bind to multiple addresses + */ + public void testBindMultipleAddresses() throws Exception { + NetworkService service = new NetworkService(Settings.EMPTY); + InetAddress[] addresses = 
service.resolveBindHostAddresses(new String[]{"127.0.0.1", "127.0.0.2"}); + assertThat(addresses.length, is(2)); + } + + /** + * ensure we can't bind to multiple addresses when using wildcard + */ + public void testBindMultipleAddressesWithWildcard() throws Exception { + NetworkService service = new NetworkService(Settings.EMPTY); + try { + service.resolveBindHostAddresses(new String[]{"0.0.0.0", "127.0.0.1"}); + fail("should have hit exception"); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains("is wildcard, but multiple addresses specified")); + } + } } From 61266dee19ef94d477214a81cccda6dd4303c39f Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Wed, 9 Dec 2015 19:21:35 -0500 Subject: [PATCH 44/57] mark messy rest test as messy and add to messy tests lists --- .../test/java/org/elasticsearch/messy/tests/package-info.java | 3 ++- .../{50_msearch.yaml => 50_messy_test_msearch.yaml} | 0 2 files changed, 2 insertions(+), 1 deletion(-) rename modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/{50_msearch.yaml => 50_messy_test_msearch.yaml} (100%) diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/package-info.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/package-info.java index 46542313821..a2325b2d511 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/package-info.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/package-info.java @@ -40,6 +40,7 @@ renamed: core/src/test/java/org/elasticsearch/index/query/TemplateQueryParser renamed: core/src/test/java/org/elasticsearch/index/query/TemplateQueryIT.java -> modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java renamed: core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java -> module/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java ^^^^^ note: just the 
methods from this test using mustache were moved here, the others use groovy and are in the groovy module under its messy tests package. +renamed: rest-api-spec/test/msearch/10_basic.yaml -> module/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_messy_test_msearch.yaml */ -package org.elasticsearch.messy.tests; \ No newline at end of file +package org.elasticsearch.messy.tests; diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_msearch.yaml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_messy_test_msearch.yaml similarity index 100% rename from modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_msearch.yaml rename to modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_messy_test_msearch.yaml From da5b07ae13d873ad008af709d415b570ce85f876 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Wed, 9 Dec 2015 16:32:37 -0800 Subject: [PATCH 45/57] Added a new scripting language (PlanA). 
Closes #15136 --- .../elasticsearch/plugins/PluginManager.java | 1 + .../elasticsearch/plugins/plugin-install.help | 1 + dev-tools/smoke_test_rc.py | 1 + plugins/lang-plan-a/ant.xml | 145 + plugins/lang-plan-a/build.gradle | 48 + .../licenses/antlr4-runtime-4.5.1-1.jar.sha1 | 1 + .../licenses/antlr4-runtime-LICENSE.txt | 26 + .../licenses/antlr4-runtime-NOTICE.txt | 0 .../lang-plan-a/licenses/asm-5.0.4.jar.sha1 | 1 + plugins/lang-plan-a/licenses/asm-LICENSE.txt | 26 + plugins/lang-plan-a/licenses/asm-NOTICE.txt | 1 + .../licenses/asm-commons-5.0.4.jar.sha1 | 1 + .../licenses/asm-commons-LICENSE.txt | 26 + .../licenses/asm-commons-NOTICE.txt | 1 + .../lang-plan-a/src/main/antlr/PlanALexer.g4 | 120 + .../lang-plan-a/src/main/antlr/PlanAParser.g4 | 127 + .../org/elasticsearch/plan/a/Adapter.java | 276 ++ .../org/elasticsearch/plan/a/Analyzer.java | 2983 +++++++++++++++++ .../org/elasticsearch/plan/a/Compiler.java | 154 + .../plan/a/CompilerSettings.java | 49 + .../java/org/elasticsearch/plan/a/Def.java | 1250 +++++++ .../org/elasticsearch/plan/a/Definition.java | 1809 ++++++++++ .../plan/a/ErrorHandlingLexer.java | 45 + .../org/elasticsearch/plan/a/Executable.java | 50 + .../plan/a/ParserErrorStrategy.java | 74 + .../org/elasticsearch/plan/a/PlanALexer.java | 390 +++ .../org/elasticsearch/plan/a/PlanAParser.java | 2884 ++++++++++++++++ .../plan/a/PlanAParserBaseVisitor.java | 357 ++ .../plan/a/PlanAParserVisitor.java | 336 ++ .../org/elasticsearch/plan/a/PlanAPlugin.java | 40 + .../plan/a/PlanAScriptEngineService.java | 140 + .../org/elasticsearch/plan/a/ScriptImpl.java | 96 + .../org/elasticsearch/plan/a/Utility.java | 801 +++++ .../java/org/elasticsearch/plan/a/Writer.java | 2224 ++++++++++++ .../plugin-metadata/plugin-security.policy | 23 + .../elasticsearch/plan/a/AdditionTests.java | 199 ++ .../org/elasticsearch/plan/a/AndTests.java | 48 + .../plan/a/BasicExpressionTests.java | 126 + .../plan/a/BasicStatementTests.java | 178 + .../plan/a/BinaryOperatorTests.java 
| 294 ++ .../plan/a/CompoundAssignmentTests.java | 319 ++ .../plan/a/ConditionalTests.java | 93 + .../org/elasticsearch/plan/a/DefTests.java | 914 +++++ .../elasticsearch/plan/a/DivisionTests.java | 147 + .../org/elasticsearch/plan/a/EqualsTests.java | 184 + .../org/elasticsearch/plan/a/FieldTests.java | 108 + .../plan/a/FloatOverflowDisabledTests.java | 294 ++ .../plan/a/FloatOverflowEnabledTests.java | 144 + .../elasticsearch/plan/a/IncrementTests.java | 79 + .../plan/a/IntegerOverflowDisabledTests.java | 445 +++ .../plan/a/IntegerOverflowEnabledTests.java | 194 ++ .../plan/a/MultiplicationTests.java | 126 + .../elasticsearch/plan/a/NoSemiColonTest.java | 178 + .../org/elasticsearch/plan/a/OrTests.java | 48 + .../org/elasticsearch/plan/a/PlanARestIT.java | 49 + .../elasticsearch/plan/a/RemainderTests.java | 147 + .../plan/a/ScriptEngineTests.java | 109 + .../elasticsearch/plan/a/ScriptTestCase.java | 61 + .../org/elasticsearch/plan/a/StringTests.java | 75 + .../plan/a/SubtractionTests.java | 179 + .../org/elasticsearch/plan/a/UnaryTests.java | 42 + .../elasticsearch/plan/a/UtilityTests.java | 250 ++ .../plan/a/WhenThingsGoWrongTests.java | 41 + .../org/elasticsearch/plan/a/XorTests.java | 62 + .../rest-api-spec/test/plan_a/10_basic.yaml | 14 + .../test/plan_a/20_scriptfield.yaml | 27 + .../rest-api-spec/test/plan_a/30_search.yaml | 97 + .../plugins/PluginManagerTests.java | 1 + .../packaging/scripts/plugin_test_cases.bash | 24 + settings.gradle | 1 + 70 files changed, 19804 insertions(+) create mode 100644 plugins/lang-plan-a/ant.xml create mode 100644 plugins/lang-plan-a/build.gradle create mode 100644 plugins/lang-plan-a/licenses/antlr4-runtime-4.5.1-1.jar.sha1 create mode 100644 plugins/lang-plan-a/licenses/antlr4-runtime-LICENSE.txt create mode 100644 plugins/lang-plan-a/licenses/antlr4-runtime-NOTICE.txt create mode 100644 plugins/lang-plan-a/licenses/asm-5.0.4.jar.sha1 create mode 100644 plugins/lang-plan-a/licenses/asm-LICENSE.txt create mode 100644 
plugins/lang-plan-a/licenses/asm-NOTICE.txt create mode 100644 plugins/lang-plan-a/licenses/asm-commons-5.0.4.jar.sha1 create mode 100644 plugins/lang-plan-a/licenses/asm-commons-LICENSE.txt create mode 100644 plugins/lang-plan-a/licenses/asm-commons-NOTICE.txt create mode 100644 plugins/lang-plan-a/src/main/antlr/PlanALexer.g4 create mode 100644 plugins/lang-plan-a/src/main/antlr/PlanAParser.g4 create mode 100644 plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Adapter.java create mode 100644 plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java create mode 100644 plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java create mode 100644 plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/CompilerSettings.java create mode 100644 plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java create mode 100644 plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Definition.java create mode 100644 plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java create mode 100644 plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Executable.java create mode 100644 plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ParserErrorStrategy.java create mode 100644 plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanALexer.java create mode 100644 plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java create mode 100644 plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserBaseVisitor.java create mode 100644 plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserVisitor.java create mode 100644 plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAPlugin.java create mode 100644 plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java create mode 100644 plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ScriptImpl.java create mode 100644 
plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Utility.java create mode 100644 plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Writer.java create mode 100644 plugins/lang-plan-a/src/main/plugin-metadata/plugin-security.policy create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AdditionTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AndTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicExpressionTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicStatementTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BinaryOperatorTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/CompoundAssignmentTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ConditionalTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/DefTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/DivisionTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/EqualsTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FieldTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowDisabledTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IncrementTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/MultiplicationTests.java create mode 100644 
plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/NoSemiColonTest.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/OrTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/PlanARestIT.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/RemainderTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/StringTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/SubtractionTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/UnaryTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/UtilityTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java create mode 100644 plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/XorTests.java create mode 100644 plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml create mode 100644 plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml create mode 100644 plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java b/core/src/main/java/org/elasticsearch/plugins/PluginManager.java index 599395807d0..6600bf7035d 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginManager.java @@ -83,6 +83,7 @@ public class PluginManager { "discovery-gce", "discovery-multicast", "lang-javascript", + "lang-plan-a", "lang-python", "mapper-attachments", "mapper-murmur3", diff --git 
a/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help b/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help index e6622e2743a..2a4e6a6382c 100644 --- a/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help +++ b/core/src/main/resources/org/elasticsearch/plugins/plugin-install.help @@ -44,6 +44,7 @@ OFFICIAL PLUGINS - discovery-gce - discovery-multicast - lang-javascript + - lang-plan-a - lang-python - mapper-attachments - mapper-murmur3 diff --git a/dev-tools/smoke_test_rc.py b/dev-tools/smoke_test_rc.py index b7bc00df0ab..3fa61c4361f 100644 --- a/dev-tools/smoke_test_rc.py +++ b/dev-tools/smoke_test_rc.py @@ -70,6 +70,7 @@ DEFAULT_PLUGINS = ["analysis-icu", "lang-expression", "lang-groovy", "lang-javascript", + "lang-plan-a", "lang-python", "mapper-murmur3", "mapper-size", diff --git a/plugins/lang-plan-a/ant.xml b/plugins/lang-plan-a/ant.xml new file mode 100644 index 00000000000..bf1c9b93757 --- /dev/null +++ b/plugins/lang-plan-a/ant.xml @@ -0,0 +1,145 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/plugins/lang-plan-a/build.gradle b/plugins/lang-plan-a/build.gradle new file mode 100644 index 00000000000..618c094f683 --- /dev/null +++ b/plugins/lang-plan-a/build.gradle @@ -0,0 +1,48 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.tools.ant.types.Path + +esplugin { + description 'An easy, safe and fast scripting language for Elasticsearch' + classname 'org.elasticsearch.plan.a.PlanAPlugin' +} + +dependencies { + compile 'org.antlr:antlr4-runtime:4.5.1-1' + compile 'org.ow2.asm:asm:5.0.4' + compile 'org.ow2.asm:asm-commons:5.0.4' +} + +compileJava.options.compilerArgs << '-Xlint:-cast,-fallthrough,-rawtypes' +compileTestJava.options.compilerArgs << '-Xlint:-unchecked' + +// regeneration logic, comes in via ant right now +// don't port it to gradle, it works fine. + +configurations { + regenerate +} + +dependencies { + regenerate 'org.antlr:antlr4:4.5.1-1' +} + +ant.references['regenerate.classpath'] = new Path(ant.project, configurations.regenerate.asPath) +ant.importBuild 'ant.xml' diff --git a/plugins/lang-plan-a/licenses/antlr4-runtime-4.5.1-1.jar.sha1 b/plugins/lang-plan-a/licenses/antlr4-runtime-4.5.1-1.jar.sha1 new file mode 100644 index 00000000000..37f80b91724 --- /dev/null +++ b/plugins/lang-plan-a/licenses/antlr4-runtime-4.5.1-1.jar.sha1 @@ -0,0 +1 @@ +66144204f9d6d7d3f3f775622c2dd7e9bd511d97 \ No newline at end of file diff --git a/plugins/lang-plan-a/licenses/antlr4-runtime-LICENSE.txt b/plugins/lang-plan-a/licenses/antlr4-runtime-LICENSE.txt new file mode 100644 index 00000000000..95d0a2554f6 --- /dev/null +++ b/plugins/lang-plan-a/licenses/antlr4-runtime-LICENSE.txt @@ -0,0 +1,26 @@ +[The "BSD license"] +Copyright (c) 2015 Terence Parr, Sam Harwell +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + 3. The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/plugins/lang-plan-a/licenses/antlr4-runtime-NOTICE.txt b/plugins/lang-plan-a/licenses/antlr4-runtime-NOTICE.txt new file mode 100644 index 00000000000..e69de29bb2d diff --git a/plugins/lang-plan-a/licenses/asm-5.0.4.jar.sha1 b/plugins/lang-plan-a/licenses/asm-5.0.4.jar.sha1 new file mode 100644 index 00000000000..9223dba380f --- /dev/null +++ b/plugins/lang-plan-a/licenses/asm-5.0.4.jar.sha1 @@ -0,0 +1 @@ +0da08b8cce7bbf903602a25a3a163ae252435795 diff --git a/plugins/lang-plan-a/licenses/asm-LICENSE.txt b/plugins/lang-plan-a/licenses/asm-LICENSE.txt new file mode 100644 index 00000000000..afb064f2f26 --- /dev/null +++ b/plugins/lang-plan-a/licenses/asm-LICENSE.txt @@ -0,0 +1,26 @@ +Copyright (c) 2012 France Télécom +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +3. Neither the name of the copyright holders nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF +THE POSSIBILITY OF SUCH DAMAGE. diff --git a/plugins/lang-plan-a/licenses/asm-NOTICE.txt b/plugins/lang-plan-a/licenses/asm-NOTICE.txt new file mode 100644 index 00000000000..8d1c8b69c3f --- /dev/null +++ b/plugins/lang-plan-a/licenses/asm-NOTICE.txt @@ -0,0 +1 @@ + diff --git a/plugins/lang-plan-a/licenses/asm-commons-5.0.4.jar.sha1 b/plugins/lang-plan-a/licenses/asm-commons-5.0.4.jar.sha1 new file mode 100644 index 00000000000..94fe0cd92c9 --- /dev/null +++ b/plugins/lang-plan-a/licenses/asm-commons-5.0.4.jar.sha1 @@ -0,0 +1 @@ +5a556786086c23cd689a0328f8519db93821c04c diff --git a/plugins/lang-plan-a/licenses/asm-commons-LICENSE.txt b/plugins/lang-plan-a/licenses/asm-commons-LICENSE.txt new file mode 100644 index 00000000000..afb064f2f26 --- /dev/null +++ b/plugins/lang-plan-a/licenses/asm-commons-LICENSE.txt @@ -0,0 +1,26 @@ +Copyright (c) 2012 France Télécom +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +3. 
Neither the name of the copyright holders nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF +THE POSSIBILITY OF SUCH DAMAGE. diff --git a/plugins/lang-plan-a/licenses/asm-commons-NOTICE.txt b/plugins/lang-plan-a/licenses/asm-commons-NOTICE.txt new file mode 100644 index 00000000000..8d1c8b69c3f --- /dev/null +++ b/plugins/lang-plan-a/licenses/asm-commons-NOTICE.txt @@ -0,0 +1 @@ + diff --git a/plugins/lang-plan-a/src/main/antlr/PlanALexer.g4 b/plugins/lang-plan-a/src/main/antlr/PlanALexer.g4 new file mode 100644 index 00000000000..5110a73e8ca --- /dev/null +++ b/plugins/lang-plan-a/src/main/antlr/PlanALexer.g4 @@ -0,0 +1,120 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +lexer grammar PlanALexer; + +@header { + import java.util.Set; +} + +@members { + private Set types = null; + + void setTypes(Set types) { + this.types = types; + } +} + +WS: [ \t\n\r]+ -> skip; +COMMENT: ( '//' .*? [\n\r] | '/*' .*? '*/' ) -> skip; + +LBRACK: '{'; +RBRACK: '}'; +LBRACE: '['; +RBRACE: ']'; +LP: '('; +RP: ')'; +DOT: '.' -> mode(EXT); +COMMA: ','; +SEMICOLON: ';'; +IF: 'if'; +ELSE: 'else'; +WHILE: 'while'; +DO: 'do'; +FOR: 'for'; +CONTINUE: 'continue'; +BREAK: 'break'; +RETURN: 'return'; +NEW: 'new'; +TRY: 'try'; +CATCH: 'catch'; +THROW: 'throw'; + +BOOLNOT: '!'; +BWNOT: '~'; +MUL: '*'; +DIV: '/'; +REM: '%'; +ADD: '+'; +SUB: '-'; +LSH: '<<'; +RSH: '>>'; +USH: '>>>'; +LT: '<'; +LTE: '<='; +GT: '>'; +GTE: '>='; +EQ: '=='; +EQR: '==='; +NE: '!='; +NER: '!=='; +BWAND: '&'; +BWXOR: '^'; +BWOR: '|'; +BOOLAND: '&&'; +BOOLOR: '||'; +COND: '?'; +COLON: ':'; +INCR: '++'; +DECR: '--'; + +ASSIGN: '='; +AADD: '+='; +ASUB: '-='; +AMUL: '*='; +ADIV: '/='; +AREM: '%='; +AAND: '&='; +AXOR: '^='; +AOR: '|='; +ALSH: '<<='; +ARSH: '>>='; +AUSH: '>>>='; +ACAT: '..='; + +OCTAL: '0' [0-7]+ [lL]?; +HEX: '0' [xX] [0-9a-fA-F]+ [lL]?; +INTEGER: ( '0' | [1-9] [0-9]* ) [lLfFdD]?; +DECIMAL: ( '0' | [1-9] [0-9]* ) DOT [0-9]* ( [eE] [+\-]? [0-9]+ )? [fF]?; + +STRING: '"' ( '\\"' | '\\\\' | ~[\\"] )*? '"' {setText(getText().substring(1, getText().length() - 1));}; +CHAR: '\'' . '\'' {setText(getText().substring(1, getText().length() - 1));}; + +TRUE: 'true'; +FALSE: 'false'; + +NULL: 'null'; + +TYPE: ID GENERIC? 
{types.contains(getText().replace(" ", ""))}? {setText(getText().replace(" ", ""));}; +fragment GENERIC: ' '* '<' ' '* ( ID GENERIC? ) ' '* ( COMMA ' '* ( ID GENERIC? ) ' '* )* '>'; +ID: [_a-zA-Z] [_a-zA-Z0-9]*; + +mode EXT; +EXTINTEGER: ( '0' | [1-9] [0-9]* ) -> mode(DEFAULT_MODE); +EXTID: [_a-zA-Z] [_a-zA-Z0-9]* -> mode(DEFAULT_MODE); diff --git a/plugins/lang-plan-a/src/main/antlr/PlanAParser.g4 b/plugins/lang-plan-a/src/main/antlr/PlanAParser.g4 new file mode 100644 index 00000000000..1b177a43381 --- /dev/null +++ b/plugins/lang-plan-a/src/main/antlr/PlanAParser.g4 @@ -0,0 +1,127 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +parser grammar PlanAParser; + +options { tokenVocab=PlanALexer; } + +source + : statement+ EOF + ; + +statement + : IF LP expression RP block ( ELSE block )? # if + | WHILE LP expression RP ( block | empty ) # while + | DO block WHILE LP expression RP SEMICOLON? # do + | FOR LP initializer? SEMICOLON expression? SEMICOLON afterthought? RP ( block | empty ) # for + | declaration SEMICOLON? # decl + | CONTINUE SEMICOLON? # continue + | BREAK SEMICOLON? # break + | RETURN expression SEMICOLON? # return + | TRY block ( CATCH LP ( TYPE ID ) RP block )+ # try + | THROW expression SEMICOLON? 
# throw + | expression SEMICOLON? # expr + ; + +block + : LBRACK statement* RBRACK # multiple + | statement # single + ; + +empty + : SEMICOLON + ; + +initializer + : declaration + | expression + ; + +afterthought + : expression + ; + +declaration + : decltype declvar ( COMMA declvar )* + ; + +decltype + : TYPE (LBRACE RBRACE)* + ; + +declvar + : ID ( ASSIGN expression )? + ; + +expression + : LP expression RP # precedence + | ( OCTAL | HEX | INTEGER | DECIMAL ) # numeric + | CHAR # char + | TRUE # true + | FALSE # false + | NULL # null + | extstart increment # postinc + | increment extstart # preinc + | extstart # external + | ( BOOLNOT | BWNOT | ADD | SUB ) expression # unary + | LP decltype RP expression # cast + | expression ( MUL | DIV | REM ) expression # binary + | expression ( ADD | SUB ) expression # binary + | expression ( LSH | RSH | USH ) expression # binary + | expression ( LT | LTE | GT | GTE ) expression # comp + | expression ( EQ | EQR | NE | NER ) expression # comp + | expression BWAND expression # binary + | expression BWXOR expression # binary + | expression BWOR expression # binary + | expression BOOLAND expression # bool + | expression BOOLOR expression # bool + | expression COND expression COLON expression # conditional + | extstart ( ASSIGN | AADD | ASUB | AMUL | ADIV + | AREM | AAND | AXOR | AOR + | ALSH | ARSH | AUSH ) expression # assignment + ; + +extstart + : extprec + | extcast + | exttype + | extvar + | extnew + | extstring + ; + +extprec: LP ( extprec | extcast | exttype | extvar | extnew | extstring ) RP ( extdot | extbrace )?; +extcast: LP decltype RP ( extprec | extcast | exttype | extvar | extnew | extstring ); +extbrace: LBRACE expression RBRACE ( extdot | extbrace )?; +extdot: DOT ( extcall | extfield ); +exttype: TYPE extdot; +extcall: EXTID arguments ( extdot | extbrace )?; +extvar: ID ( extdot | extbrace )?; +extfield: ( EXTID | EXTINTEGER ) ( extdot | extbrace )?; +extnew: NEW TYPE ( ( arguments ( extdot | extbrace)? 
) | ( ( LBRACE expression RBRACE )+ extdot? ) ); +extstring: STRING (extdot | extbrace )?; + +arguments + : ( LP ( expression ( COMMA expression )* )? RP ) + ; + +increment + : INCR + | DECR + ; diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Adapter.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Adapter.java new file mode 100644 index 00000000000..baa06f45ff8 --- /dev/null +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Adapter.java @@ -0,0 +1,276 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plan.a; + +import java.util.HashMap; +import java.util.Map; + +import org.antlr.v4.runtime.ParserRuleContext; +import org.antlr.v4.runtime.tree.ParseTree; + +import static org.elasticsearch.plan.a.Definition.*; +import static org.elasticsearch.plan.a.PlanAParser.*; + +class Adapter { + static class StatementMetadata { + final ParserRuleContext source; + + boolean last; + + boolean allExit; + boolean allReturn; + boolean anyReturn; + boolean allBreak; + boolean anyBreak; + boolean allContinue; + boolean anyContinue; + + private StatementMetadata(final ParserRuleContext source) { + this.source = source; + + last = false; + + allExit = false; + allReturn = false; + anyReturn = false; + allBreak = false; + anyBreak = false; + allContinue = false; + anyContinue = false; + } + } + + static class ExpressionMetadata { + final ParserRuleContext source; + + boolean read; + boolean statement; + + Object preConst; + Object postConst; + boolean isNull; + + Type to; + Type from; + boolean explicit; + boolean typesafe; + + Cast cast; + + private ExpressionMetadata(final ParserRuleContext source) { + this.source = source; + + read = true; + statement = false; + + preConst = null; + postConst = null; + isNull = false; + + to = null; + from = null; + explicit = false; + typesafe = true; + + cast = null; + } + } + + static class ExternalMetadata { + final ParserRuleContext source; + + boolean read; + ParserRuleContext storeExpr; + int token; + boolean pre; + boolean post; + + int scope; + Type current; + boolean statik; + boolean statement; + Object constant; + + private ExternalMetadata(final ParserRuleContext source) { + this.source = source; + + read = false; + storeExpr = null; + token = 0; + pre = false; + post = false; + + scope = 0; + current = null; + statik = false; + statement = false; + constant = null; + } + } + + static class ExtNodeMetadata { + final ParserRuleContext parent; + final ParserRuleContext source; + + Object target; + 
boolean last; + + Type type; + Type promote; + + Cast castFrom; + Cast castTo; + + private ExtNodeMetadata(final ParserRuleContext parent, final ParserRuleContext source) { + this.parent = parent; + this.source = source; + + target = null; + last = false; + + type = null; + promote = null; + + castFrom = null; + castTo = null; + } + } + + static String error(final ParserRuleContext ctx) { + return "Error [" + ctx.getStart().getLine() + ":" + ctx.getStart().getCharPositionInLine() + "]: "; + } + + final Definition definition; + final String source; + final ParserRuleContext root; + final CompilerSettings settings; + + private final Map statementMetadata; + private final Map expressionMetadata; + private final Map externalMetadata; + private final Map extNodeMetadata; + + Adapter(final Definition definition, final String source, final ParserRuleContext root, final CompilerSettings settings) { + this.definition = definition; + this.source = source; + this.root = root; + this.settings = settings; + + statementMetadata = new HashMap<>(); + expressionMetadata = new HashMap<>(); + externalMetadata = new HashMap<>(); + extNodeMetadata = new HashMap<>(); + } + + StatementMetadata createStatementMetadata(final ParserRuleContext source) { + final StatementMetadata sourcesmd = new StatementMetadata(source); + statementMetadata.put(source, sourcesmd); + + return sourcesmd; + } + + StatementMetadata getStatementMetadata(final ParserRuleContext source) { + final StatementMetadata sourcesmd = statementMetadata.get(source); + + if (sourcesmd == null) { + throw new IllegalStateException(error(source) + "Statement metadata does not exist at" + + " the parse node with text [" + source.getText() + "]."); + } + + return sourcesmd; + } + + ExpressionContext updateExpressionTree(ExpressionContext source) { + if (source instanceof PrecedenceContext) { + final ParserRuleContext parent = source.getParent(); + int index = 0; + + for (final ParseTree child : parent.children) { + if (child == 
source) { + break; + } + + ++index; + } + + while (source instanceof PrecedenceContext) { + source = ((PrecedenceContext)source).expression(); + } + + parent.children.set(index, source); + } + + return source; + } + + ExpressionMetadata createExpressionMetadata(ParserRuleContext source) { + final ExpressionMetadata sourceemd = new ExpressionMetadata(source); + expressionMetadata.put(source, sourceemd); + + return sourceemd; + } + + ExpressionMetadata getExpressionMetadata(final ParserRuleContext source) { + final ExpressionMetadata sourceemd = expressionMetadata.get(source); + + if (sourceemd == null) { + throw new IllegalStateException(error(source) + "Expression metadata does not exist at" + + " the parse node with text [" + source.getText() + "]."); + } + + return sourceemd; + } + + ExternalMetadata createExternalMetadata(final ParserRuleContext source) { + final ExternalMetadata sourceemd = new ExternalMetadata(source); + externalMetadata.put(source, sourceemd); + + return sourceemd; + } + + ExternalMetadata getExternalMetadata(final ParserRuleContext source) { + final ExternalMetadata sourceemd = externalMetadata.get(source); + + if (sourceemd == null) { + throw new IllegalStateException(error(source) + "External metadata does not exist at" + + " the parse node with text [" + source.getText() + "]."); + } + + return sourceemd; + } + + ExtNodeMetadata createExtNodeMetadata(final ParserRuleContext parent, final ParserRuleContext source) { + final ExtNodeMetadata sourceemd = new ExtNodeMetadata(parent, source); + extNodeMetadata.put(source, sourceemd); + + return sourceemd; + } + + ExtNodeMetadata getExtNodeMetadata(final ParserRuleContext source) { + final ExtNodeMetadata sourceemd = extNodeMetadata.get(source); + + if (sourceemd == null) { + throw new IllegalStateException(error(source) + "External metadata does not exist at" + + " the parse node with text [" + source.getText() + "]."); + } + + return sourceemd; + } +} diff --git 
a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java new file mode 100644 index 00000000000..a7e2986d633 --- /dev/null +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java @@ -0,0 +1,2983 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plan.a; + +import java.util.ArrayDeque; +import java.util.Arrays; +import java.util.Deque; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.antlr.v4.runtime.ParserRuleContext; + +import static org.elasticsearch.plan.a.Adapter.*; +import static org.elasticsearch.plan.a.Definition.*; +import static org.elasticsearch.plan.a.PlanAParser.*; + +class Analyzer extends PlanAParserBaseVisitor { + private static class Variable { + final String name; + final Type type; + final int slot; + + private Variable(final String name, final Type type, final int slot) { + this.name = name; + this.type = type; + this.slot = slot; + } + } + + static void analyze(final Adapter adapter) { + new Analyzer(adapter); + } + + private final Adapter adapter; + private final Definition definition; + private final CompilerSettings settings; + + private final Deque scopes; + private final Deque variables; + + private Analyzer(final Adapter adapter) { + this.adapter = adapter; + definition = adapter.definition; + settings = adapter.settings; + + scopes = new ArrayDeque<>(); + variables = new ArrayDeque<>(); + + incrementScope(); + addVariable(null, "this", definition.execType); + addVariable(null, "input", definition.smapType); + + adapter.createStatementMetadata(adapter.root); + visit(adapter.root); + + decrementScope(); + } + + void incrementScope() { + scopes.push(0); + } + + void decrementScope() { + int remove = scopes.pop(); + + while (remove > 0) { + variables.pop(); + --remove; + } + } + + Variable getVariable(final String name) { + final Iterator itr = variables.iterator(); + + while (itr.hasNext()) { + final Variable variable = itr.next(); + + if (variable.name.equals(name)) { + return variable; + } + } + + return null; + } + + Variable addVariable(final ParserRuleContext source, final String name, final Type type) { + if (getVariable(name) != null) { + if (source == null) { + throw new 
IllegalArgumentException("Argument name [" + name + "] already defined within the scope."); + } else { + throw new IllegalArgumentException( + error(source) + "Variable name [" + name + "] already defined within the scope."); + } + } + + final Variable previous = variables.peekFirst(); + int slot = 0; + + if (previous != null) { + slot += previous.slot + previous.type.type.getSize(); + } + + final Variable variable = new Variable(name, type, slot); + variables.push(variable); + + final int update = scopes.pop() + 1; + scopes.push(update); + + return variable; + } + + @Override + public Void visitSource(final SourceContext ctx) { + final StatementMetadata sourcesmd = adapter.getStatementMetadata(ctx); + final List statectxs = ctx.statement(); + final StatementContext lastctx = statectxs.get(statectxs.size() - 1); + + incrementScope(); + + for (final StatementContext statectx : statectxs) { + if (sourcesmd.allExit) { + throw new IllegalArgumentException(error(statectx) + + "Statement will never be executed because all prior paths exit."); + } + + final StatementMetadata statesmd = adapter.createStatementMetadata(statectx); + statesmd.last = statectx == lastctx; + visit(statectx); + + if (statesmd.anyContinue) { + throw new IllegalArgumentException(error(statectx) + + "Cannot have a continue statement outside of a loop."); + } + + if (statesmd.anyBreak) { + throw new IllegalArgumentException(error(statectx) + + "Cannot have a break statement outside of a loop."); + } + + sourcesmd.allExit = statesmd.allExit; + sourcesmd.allReturn = statesmd.allReturn; + } + + decrementScope(); + + return null; + } + + @Override + public Void visitIf(final IfContext ctx) { + final StatementMetadata ifsmd = adapter.getStatementMetadata(ctx); + + incrementScope(); + + final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + expremd.to = definition.booleanType; + visit(exprctx); + 
markCast(expremd); + + if (expremd.postConst != null) { + throw new IllegalArgumentException(error(ctx) + "If statement is not necessary."); + } + + final BlockContext blockctx0 = ctx.block(0); + final StatementMetadata blocksmd0 = adapter.createStatementMetadata(blockctx0); + blocksmd0.last = ifsmd.last; + visit(blockctx0); + + ifsmd.anyReturn = blocksmd0.anyReturn; + ifsmd.anyBreak = blocksmd0.anyBreak; + ifsmd.anyContinue = blocksmd0.anyContinue; + + if (ctx.ELSE() != null) { + final BlockContext blockctx1 = ctx.block(1); + final StatementMetadata blocksmd1 = adapter.createStatementMetadata(blockctx1); + blocksmd1.last = ifsmd.last; + visit(blockctx1); + + ifsmd.allExit = blocksmd0.allExit && blocksmd1.allExit; + ifsmd.allReturn = blocksmd0.allReturn && blocksmd1.allReturn; + ifsmd.anyReturn |= blocksmd1.anyReturn; + ifsmd.allBreak = blocksmd0.allBreak && blocksmd1.allBreak; + ifsmd.anyBreak |= blocksmd1.anyBreak; + ifsmd.allContinue = blocksmd0.allContinue && blocksmd1.allContinue; + ifsmd.anyContinue |= blocksmd1.anyContinue; + } + + decrementScope(); + + return null; + } + + @Override + public Void visitWhile(final WhileContext ctx) { + final StatementMetadata whilesmd = adapter.getStatementMetadata(ctx); + + incrementScope(); + + final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + expremd.to = definition.booleanType; + visit(exprctx); + markCast(expremd); + + boolean exitrequired = false; + + if (expremd.postConst != null) { + boolean constant = (boolean)expremd.postConst; + + if (!constant) { + throw new IllegalArgumentException(error(ctx) + "The loop will never be executed."); + } + + exitrequired = true; + } + + final BlockContext blockctx = ctx.block(); + + if (blockctx != null) { + final StatementMetadata blocksmd = adapter.createStatementMetadata(blockctx); + visit(blockctx); + + if (blocksmd.allReturn) { + throw new 
IllegalArgumentException(error(ctx) + "All paths return so the loop is not necessary."); + } + + if (blocksmd.allBreak) { + throw new IllegalArgumentException(error(ctx) + "All paths break so the loop is not necessary."); + } + + if (exitrequired && !blocksmd.anyReturn && !blocksmd.anyBreak) { + throw new IllegalArgumentException(error(ctx) + "The loop will never exit."); + } + + if (exitrequired && blocksmd.anyReturn && !blocksmd.anyBreak) { + whilesmd.allExit = true; + whilesmd.allReturn = true; + } + } else if (exitrequired) { + throw new IllegalArgumentException(error(ctx) + "The loop will never exit."); + } + + decrementScope(); + + return null; + } + + @Override + public Void visitDo(final DoContext ctx) { + final StatementMetadata dosmd = adapter.getStatementMetadata(ctx); + + incrementScope(); + + final BlockContext blockctx = ctx.block(); + final StatementMetadata blocksmd = adapter.createStatementMetadata(blockctx); + visit(blockctx); + + if (blocksmd.allReturn) { + throw new IllegalArgumentException(error(ctx) + "All paths return so the loop is not necessary."); + } + + if (blocksmd.allBreak) { + throw new IllegalArgumentException(error(ctx) + "All paths break so the loop is not necessary."); + } + + if (blocksmd.allContinue) { + throw new IllegalArgumentException(error(ctx) + "The loop will never exit."); + } + + final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + expremd.to = definition.booleanType; + visit(exprctx); + markCast(expremd); + + if (expremd.postConst != null) { + final boolean exitrequired = (boolean)expremd.postConst; + + if (exitrequired && !blocksmd.anyReturn && !blocksmd.anyBreak) { + throw new IllegalArgumentException(error(ctx) + "The loop will never exit."); + } + + if (exitrequired && blocksmd.anyReturn && !blocksmd.anyBreak) { + dosmd.allExit = true; + dosmd.allReturn = true; + } + + if (!exitrequired && 
!blocksmd.anyContinue) { + throw new IllegalArgumentException(error(ctx) + "All paths exit so the loop is not necessary."); + } + } + + decrementScope(); + + return null; + } + + @Override + public Void visitFor(final ForContext ctx) { + final StatementMetadata forsmd = adapter.getStatementMetadata(ctx); + boolean exitrequired = false; + + incrementScope(); + + final InitializerContext initctx = ctx.initializer(); + + if (initctx != null) { + adapter.createStatementMetadata(initctx); + visit(initctx); + } + + final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); + + if (exprctx != null) { + final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + expremd.to = definition.booleanType; + visit(exprctx); + markCast(expremd); + + if (expremd.postConst != null) { + boolean constant = (boolean)expremd.postConst; + + if (!constant) { + throw new IllegalArgumentException(error(ctx) + "The loop will never be executed."); + } + + exitrequired = true; + } + } else { + exitrequired = true; + } + + final AfterthoughtContext atctx = ctx.afterthought(); + + if (atctx != null) { + adapter.createStatementMetadata(atctx); + visit(atctx); + } + + final BlockContext blockctx = ctx.block(); + + if (blockctx != null) { + final StatementMetadata blocksmd = adapter.createStatementMetadata(blockctx); + visit(blockctx); + + if (blocksmd.allReturn) { + throw new IllegalArgumentException(error(ctx) + "All paths return so the loop is not necessary."); + } + + if (blocksmd.allBreak) { + throw new IllegalArgumentException(error(ctx) + "All paths break so the loop is not necessary."); + } + + if (exitrequired && !blocksmd.anyReturn && !blocksmd.anyBreak) { + throw new IllegalArgumentException(error(ctx) + "The loop will never exit."); + } + + if (exitrequired && blocksmd.anyReturn && !blocksmd.anyBreak) { + forsmd.allExit = true; + forsmd.allReturn = true; + } + } else if (exitrequired) { + throw new IllegalArgumentException(error(ctx) + "The loop 
will never exit."); + } + + decrementScope(); + + return null; + } + + @Override + public Void visitDecl(final DeclContext ctx) { + final DeclarationContext declctx = ctx.declaration(); + adapter.createStatementMetadata(declctx); + visit(declctx); + + return null; + } + + @Override + public Void visitContinue(final ContinueContext ctx) { + final StatementMetadata continuesmd = adapter.getStatementMetadata(ctx); + + continuesmd.allExit = true; + continuesmd.allContinue = true; + continuesmd.anyContinue = true; + + return null; + } + + @Override + public Void visitBreak(final BreakContext ctx) { + final StatementMetadata breaksmd = adapter.getStatementMetadata(ctx); + + breaksmd.allExit = true; + breaksmd.allBreak = true; + breaksmd.anyBreak = true; + + return null; + } + + @Override + public Void visitReturn(final ReturnContext ctx) { + final StatementMetadata returnsmd = adapter.getStatementMetadata(ctx); + + final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + expremd.to = definition.objectType; + visit(exprctx); + markCast(expremd); + + returnsmd.allExit = true; + returnsmd.allReturn = true; + returnsmd.anyReturn = true; + + return null; + } + + @Override + public Void visitExpr(final ExprContext ctx) { + final StatementMetadata exprsmd = adapter.getStatementMetadata(ctx); + final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + expremd.read = exprsmd.last; + visit(exprctx); + + if (!expremd.statement && !exprsmd.last) { + throw new IllegalArgumentException(error(ctx) + "Not a statement."); + } + + final boolean rtn = exprsmd.last && expremd.from.sort != Sort.VOID; + exprsmd.allExit = rtn; + exprsmd.allReturn = rtn; + exprsmd.anyReturn = rtn; + expremd.to = rtn ? 
definition.objectType : expremd.from; + markCast(expremd); + + return null; + } + + @Override + public Void visitMultiple(final MultipleContext ctx) { + final StatementMetadata multiplesmd = adapter.getStatementMetadata(ctx); + final List statectxs = ctx.statement(); + final StatementContext lastctx = statectxs.get(statectxs.size() - 1); + + for (StatementContext statectx : statectxs) { + if (multiplesmd.allExit) { + throw new IllegalArgumentException(error(statectx) + + "Statement will never be executed because all prior paths exit."); + } + + final StatementMetadata statesmd = adapter.createStatementMetadata(statectx); + statesmd.last = multiplesmd.last && statectx == lastctx; + visit(statectx); + + multiplesmd.allExit = statesmd.allExit; + multiplesmd.allReturn = statesmd.allReturn && !statesmd.anyBreak && !statesmd.anyContinue; + multiplesmd.anyReturn |= statesmd.anyReturn; + multiplesmd.allBreak = !statesmd.anyReturn && statesmd.allBreak && !statesmd.anyContinue; + multiplesmd.anyBreak |= statesmd.anyBreak; + multiplesmd.allContinue = !statesmd.anyReturn && !statesmd.anyBreak && statesmd.allContinue; + multiplesmd.anyContinue |= statesmd.anyContinue; + } + + return null; + } + + @Override + public Void visitSingle(final SingleContext ctx) { + final StatementMetadata singlesmd = adapter.getStatementMetadata(ctx); + + final StatementContext statectx = ctx.statement(); + final StatementMetadata statesmd = adapter.createStatementMetadata(statectx); + statesmd.last = singlesmd.last; + visit(statectx); + + singlesmd.allExit = statesmd.allExit; + singlesmd.allReturn = statesmd.allReturn; + singlesmd.anyReturn = statesmd.anyReturn; + singlesmd.allBreak = statesmd.allBreak; + singlesmd.anyBreak = statesmd.anyBreak; + singlesmd.allContinue = statesmd.allContinue; + singlesmd.anyContinue = statesmd.anyContinue; + + return null; + } + + @Override + public Void visitEmpty(final EmptyContext ctx) { + throw new UnsupportedOperationException(error(ctx) + "Unexpected parser 
state."); + } + + @Override + public Void visitInitializer(InitializerContext ctx) { + final DeclarationContext declctx = ctx.declaration(); + final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); + + if (declctx != null) { + adapter.createStatementMetadata(declctx); + visit(declctx); + } else if (exprctx != null) { + final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + expremd.read = false; + visit(exprctx); + + expremd.to = expremd.from; + markCast(expremd); + + if (!expremd.statement) { + throw new IllegalArgumentException(error(exprctx) + + "The intializer of a for loop must be a statement."); + } + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + + return null; + } + + @Override + public Void visitAfterthought(AfterthoughtContext ctx) { + ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); + + if (exprctx != null) { + final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + expremd.read = false; + visit(exprctx); + + expremd.to = expremd.from; + markCast(expremd); + + if (!expremd.statement) { + throw new IllegalArgumentException(error(exprctx) + + "The afterthought of a for loop must be a statement."); + } + } + + return null; + } + + @Override + public Void visitDeclaration(final DeclarationContext ctx) { + final DecltypeContext decltypectx = ctx.decltype(); + final ExpressionMetadata decltypeemd = adapter.createExpressionMetadata(decltypectx); + visit(decltypectx); + + for (final DeclvarContext declvarctx : ctx.declvar()) { + final ExpressionMetadata declvaremd = adapter.createExpressionMetadata(declvarctx); + declvaremd.to = decltypeemd.from; + visit(declvarctx); + } + + return null; + } + + @Override + public Void visitDecltype(final DecltypeContext ctx) { + final ExpressionMetadata decltypeemd = adapter.getExpressionMetadata(ctx); + + final String name = ctx.getText(); + decltypeemd.from = 
definition.getType(name); + + return null; + } + + @Override + public Void visitDeclvar(final DeclvarContext ctx) { + final ExpressionMetadata declvaremd = adapter.getExpressionMetadata(ctx); + + final String name = ctx.ID().getText(); + declvaremd.postConst = addVariable(ctx, name, declvaremd.to).slot; + + final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); + + if (exprctx != null) { + final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + expremd.to = declvaremd.to; + visit(exprctx); + markCast(expremd); + } + + return null; + } + + @Override + public Void visitPrecedence(final PrecedenceContext ctx) { + throw new UnsupportedOperationException(error(ctx) + "Unexpected parser state."); + } + + @Override + public Void visitNumeric(final NumericContext ctx) { + final ExpressionMetadata numericemd = adapter.getExpressionMetadata(ctx); + final boolean negate = ctx.parent instanceof UnaryContext && ((UnaryContext)ctx.parent).SUB() != null; + + if (ctx.DECIMAL() != null) { + final String svalue = (negate ? "-" : "") + ctx.DECIMAL().getText(); + + if (svalue.endsWith("f") || svalue.endsWith("F")) { + try { + numericemd.from = definition.floatType; + numericemd.preConst = Float.parseFloat(svalue.substring(0, svalue.length() - 1)); + } catch (NumberFormatException exception) { + throw new IllegalArgumentException(error(ctx) + "Invalid float constant [" + svalue + "]."); + } + } else { + try { + numericemd.from = definition.doubleType; + numericemd.preConst = Double.parseDouble(svalue); + } catch (NumberFormatException exception) { + throw new IllegalArgumentException(error(ctx) + "Invalid double constant [" + svalue + "]."); + } + } + } else { + String svalue = negate ? 
"-" : ""; + int radix; + + if (ctx.OCTAL() != null) { + svalue += ctx.OCTAL().getText(); + radix = 8; + } else if (ctx.INTEGER() != null) { + svalue += ctx.INTEGER().getText(); + radix = 10; + } else if (ctx.HEX() != null) { + svalue += ctx.HEX().getText(); + radix = 16; + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + + if (svalue.endsWith("d") || svalue.endsWith("D")) { + try { + numericemd.from = definition.doubleType; + numericemd.preConst = Double.parseDouble(svalue.substring(0, svalue.length() - 1)); + } catch (NumberFormatException exception) { + throw new IllegalArgumentException(error(ctx) + "Invalid float constant [" + svalue + "]."); + } + } else if (svalue.endsWith("f") || svalue.endsWith("F")) { + try { + numericemd.from = definition.floatType; + numericemd.preConst = Float.parseFloat(svalue.substring(0, svalue.length() - 1)); + } catch (NumberFormatException exception) { + throw new IllegalArgumentException(error(ctx) + "Invalid float constant [" + svalue + "]."); + } + } else if (svalue.endsWith("l") || svalue.endsWith("L")) { + try { + numericemd.from = definition.longType; + numericemd.preConst = Long.parseLong(svalue.substring(0, svalue.length() - 1), radix); + } catch (NumberFormatException exception) { + throw new IllegalArgumentException(error(ctx) + "Invalid long constant [" + svalue + "]."); + } + } else { + try { + final Type type = numericemd.to; + final Sort sort = type == null ? 
Sort.INT : type.sort; + final int value = Integer.parseInt(svalue, radix); + + if (sort == Sort.BYTE && value >= Byte.MIN_VALUE && value <= Byte.MAX_VALUE) { + numericemd.from = definition.byteType; + numericemd.preConst = (byte)value; + } else if (sort == Sort.CHAR && value >= Character.MIN_VALUE && value <= Character.MAX_VALUE) { + numericemd.from = definition.charType; + numericemd.preConst = (char)value; + } else if (sort == Sort.SHORT && value >= Short.MIN_VALUE && value <= Short.MAX_VALUE) { + numericemd.from = definition.shortType; + numericemd.preConst = (short)value; + } else { + numericemd.from = definition.intType; + numericemd.preConst = value; + } + } catch (NumberFormatException exception) { + throw new IllegalArgumentException(error(ctx) + "Invalid int constant [" + svalue + "]."); + } + } + } + + return null; + } + + @Override + public Void visitChar(final CharContext ctx) { + final ExpressionMetadata charemd = adapter.getExpressionMetadata(ctx); + + if (ctx.CHAR() == null) { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + + charemd.preConst = ctx.CHAR().getText().charAt(0); + charemd.from = definition.charType; + + return null; + } + + @Override + public Void visitTrue(final TrueContext ctx) { + final ExpressionMetadata trueemd = adapter.getExpressionMetadata(ctx); + + if (ctx.TRUE() == null) { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + + trueemd.preConst = true; + trueemd.from = definition.booleanType; + + return null; + } + + @Override + public Void visitFalse(final FalseContext ctx) { + final ExpressionMetadata falseemd = adapter.getExpressionMetadata(ctx); + + if (ctx.FALSE() == null) { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + + falseemd.preConst = false; + falseemd.from = definition.booleanType; + + return null; + } + + @Override + public Void visitNull(final NullContext ctx) { + final ExpressionMetadata nullemd = 
adapter.getExpressionMetadata(ctx); + + if (ctx.NULL() == null) { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + + nullemd.isNull = true; + + if (nullemd.to != null) { + if (nullemd.to.sort.primitive) { + throw new IllegalArgumentException("Cannot cast null to a primitive type [" + nullemd.to.name + "]."); + } + + nullemd.from = nullemd.to; + } else { + nullemd.from = definition.objectType; + } + + return null; + } + + @Override + public Void visitExternal(final ExternalContext ctx) { + final ExpressionMetadata extemd = adapter.getExpressionMetadata(ctx); + + final ExtstartContext extstartctx = ctx.extstart(); + final ExternalMetadata extstartemd = adapter.createExternalMetadata(extstartctx); + extstartemd.read = extemd.read; + visit(extstartctx); + + extemd.statement = extstartemd.statement; + extemd.preConst = extstartemd.constant; + extemd.from = extstartemd.current; + extemd.typesafe = extstartemd.current.sort != Sort.DEF; + + return null; + } + + @Override + public Void visitPostinc(final PostincContext ctx) { + final ExpressionMetadata postincemd = adapter.getExpressionMetadata(ctx); + + final ExtstartContext extstartctx = ctx.extstart(); + final ExternalMetadata extstartemd = adapter.createExternalMetadata(extstartctx); + extstartemd.read = postincemd.read; + extstartemd.storeExpr = ctx.increment(); + extstartemd.token = ADD; + extstartemd.post = true; + visit(extstartctx); + + postincemd.statement = true; + postincemd.from = extstartemd.read ? 
extstartemd.current : definition.voidType; + postincemd.typesafe = extstartemd.current.sort != Sort.DEF; + + return null; + } + + @Override + public Void visitPreinc(final PreincContext ctx) { + final ExpressionMetadata preincemd = adapter.getExpressionMetadata(ctx); + + final ExtstartContext extstartctx = ctx.extstart(); + final ExternalMetadata extstartemd = adapter.createExternalMetadata(extstartctx); + extstartemd.read = preincemd.read; + extstartemd.storeExpr = ctx.increment(); + extstartemd.token = ADD; + extstartemd.pre = true; + visit(extstartctx); + + preincemd.statement = true; + preincemd.from = extstartemd.read ? extstartemd.current : definition.voidType; + preincemd.typesafe = extstartemd.current.sort != Sort.DEF; + + return null; + } + + @Override + public Void visitUnary(final UnaryContext ctx) { + final ExpressionMetadata unaryemd = adapter.getExpressionMetadata(ctx); + + final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + + if (ctx.BOOLNOT() != null) { + expremd.to = definition.booleanType; + visit(exprctx); + markCast(expremd); + + if (expremd.postConst != null) { + unaryemd.preConst = !(boolean)expremd.postConst; + } + + unaryemd.from = definition.booleanType; + } else if (ctx.BWNOT() != null || ctx.ADD() != null || ctx.SUB() != null) { + visit(exprctx); + + final Type promote = promoteNumeric(expremd.from, ctx.BWNOT() == null, true); + + if (promote == null) { + throw new ClassCastException("Cannot apply [" + ctx.getChild(0).getText() + "] " + + "operation to type [" + expremd.from.name + "]."); + } + + expremd.to = promote; + markCast(expremd); + + if (expremd.postConst != null) { + final Sort sort = promote.sort; + + if (ctx.BWNOT() != null) { + if (sort == Sort.INT) { + unaryemd.preConst = ~(int)expremd.postConst; + } else if (sort == Sort.LONG) { + unaryemd.preConst = ~(long)expremd.postConst; + } else { + throw new 
IllegalStateException(error(ctx) + "Unexpected parser state."); + } + } else if (ctx.SUB() != null) { + if (exprctx instanceof NumericContext) { + unaryemd.preConst = expremd.postConst; + } else { + if (sort == Sort.INT) { + if (settings.getNumericOverflow()) { + unaryemd.preConst = -(int)expremd.postConst; + } else { + unaryemd.preConst = Math.negateExact((int)expremd.postConst); + } + } else if (sort == Sort.LONG) { + if (settings.getNumericOverflow()) { + unaryemd.preConst = -(long)expremd.postConst; + } else { + unaryemd.preConst = Math.negateExact((long)expremd.postConst); + } + } else if (sort == Sort.FLOAT) { + unaryemd.preConst = -(float)expremd.postConst; + } else if (sort == Sort.DOUBLE) { + unaryemd.preConst = -(double)expremd.postConst; + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + } + } else if (ctx.ADD() != null) { + if (sort == Sort.INT) { + unaryemd.preConst = +(int)expremd.postConst; + } else if (sort == Sort.LONG) { + unaryemd.preConst = +(long)expremd.postConst; + } else if (sort == Sort.FLOAT) { + unaryemd.preConst = +(float)expremd.postConst; + } else if (sort == Sort.DOUBLE) { + unaryemd.preConst = +(double)expremd.postConst; + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + } + + unaryemd.from = promote; + unaryemd.typesafe = expremd.typesafe; + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + + return null; + } + + @Override + public Void visitCast(final CastContext ctx) { + final ExpressionMetadata castemd = adapter.getExpressionMetadata(ctx); + + final DecltypeContext decltypectx = ctx.decltype(); + final ExpressionMetadata decltypemd = adapter.createExpressionMetadata(decltypectx); + visit(decltypectx); + + final Type type = decltypemd.from; + castemd.from = type; + + final ExpressionContext exprctx = 
adapter.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + expremd.to = type; + expremd.explicit = true; + visit(exprctx); + markCast(expremd); + + if (expremd.postConst != null) { + castemd.preConst = expremd.postConst; + } + + castemd.typesafe = expremd.typesafe && castemd.from.sort != Sort.DEF; + + return null; + } + + @Override + public Void visitBinary(final BinaryContext ctx) { + final ExpressionMetadata binaryemd = adapter.getExpressionMetadata(ctx); + + final ExpressionContext exprctx0 = adapter.updateExpressionTree(ctx.expression(0)); + final ExpressionMetadata expremd0 = adapter.createExpressionMetadata(exprctx0); + visit(exprctx0); + + final ExpressionContext exprctx1 = adapter.updateExpressionTree(ctx.expression(1)); + final ExpressionMetadata expremd1 = adapter.createExpressionMetadata(exprctx1); + visit(exprctx1); + + final boolean decimal = ctx.MUL() != null || ctx.DIV() != null || ctx.REM() != null || ctx.SUB() != null; + final boolean add = ctx.ADD() != null; + final boolean xor = ctx.BWXOR() != null; + final Type promote = add ? promoteAdd(expremd0.from, expremd1.from) : + xor ? promoteXor(expremd0.from, expremd1.from) : + promoteNumeric(expremd0.from, expremd1.from, decimal, true); + + if (promote == null) { + throw new ClassCastException("Cannot apply [" + ctx.getChild(1).getText() + "] " + + "operation to types [" + expremd0.from.name + "] and [" + expremd1.from.name + "]."); + } + + final Sort sort = promote.sort; + expremd0.to = add && sort == Sort.STRING ? expremd0.from : promote; + expremd1.to = add && sort == Sort.STRING ? 
expremd1.from : promote; + markCast(expremd0); + markCast(expremd1); + + if (expremd0.postConst != null && expremd1.postConst != null) { + if (ctx.MUL() != null) { + if (sort == Sort.INT) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (int)expremd0.postConst * (int)expremd1.postConst; + } else { + binaryemd.preConst = Math.multiplyExact((int)expremd0.postConst, (int)expremd1.postConst); + } + } else if (sort == Sort.LONG) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (long)expremd0.postConst * (long)expremd1.postConst; + } else { + binaryemd.preConst = Math.multiplyExact((long)expremd0.postConst, (long)expremd1.postConst); + } + } else if (sort == Sort.FLOAT) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (float)expremd0.postConst * (float)expremd1.postConst; + } else { + binaryemd.preConst = Utility.multiplyWithoutOverflow((float)expremd0.postConst, (float)expremd1.postConst); + } + } else if (sort == Sort.DOUBLE) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (double)expremd0.postConst * (double)expremd1.postConst; + } else { + binaryemd.preConst = Utility.multiplyWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst); + } + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + } else if (ctx.DIV() != null) { + if (sort == Sort.INT) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (int)expremd0.postConst / (int)expremd1.postConst; + } else { + binaryemd.preConst = Utility.divideWithoutOverflow((int)expremd0.postConst, (int)expremd1.postConst); + } + } else if (sort == Sort.LONG) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (long)expremd0.postConst / (long)expremd1.postConst; + } else { + binaryemd.preConst = Utility.divideWithoutOverflow((long)expremd0.postConst, (long)expremd1.postConst); + } + } else if (sort == Sort.FLOAT) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = 
(float)expremd0.postConst / (float)expremd1.postConst; + } else { + binaryemd.preConst = Utility.divideWithoutOverflow((float)expremd0.postConst, (float)expremd1.postConst); + } + } else if (sort == Sort.DOUBLE) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (double)expremd0.postConst / (double)expremd1.postConst; + } else { + binaryemd.preConst = Utility.divideWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst); + } + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + } else if (ctx.REM() != null) { + if (sort == Sort.INT) { + binaryemd.preConst = (int)expremd0.postConst % (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + binaryemd.preConst = (long)expremd0.postConst % (long)expremd1.postConst; + } else if (sort == Sort.FLOAT) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (float)expremd0.postConst % (float)expremd1.postConst; + } else { + binaryemd.preConst = Utility.remainderWithoutOverflow((float)expremd0.postConst, (float)expremd1.postConst); + } + } else if (sort == Sort.DOUBLE) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (double)expremd0.postConst % (double)expremd1.postConst; + } else { + binaryemd.preConst = Utility.remainderWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst); + } + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + } else if (ctx.ADD() != null) { + if (sort == Sort.INT) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (int)expremd0.postConst + (int)expremd1.postConst; + } else { + binaryemd.preConst = Math.addExact((int)expremd0.postConst, (int)expremd1.postConst); + } + } else if (sort == Sort.LONG) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (long)expremd0.postConst + (long)expremd1.postConst; + } else { + binaryemd.preConst = Math.addExact((long)expremd0.postConst, (long)expremd1.postConst); + } + } else if (sort == 
Sort.FLOAT) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (float)expremd0.postConst + (float)expremd1.postConst; + } else { + binaryemd.preConst = Utility.addWithoutOverflow((float)expremd0.postConst, (float)expremd1.postConst); + } + } else if (sort == Sort.DOUBLE) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (double)expremd0.postConst + (double)expremd1.postConst; + } else { + binaryemd.preConst = Utility.addWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst); + } + } else if (sort == Sort.STRING) { + binaryemd.preConst = "" + expremd0.postConst + expremd1.postConst; + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + } else if (ctx.SUB() != null) { + if (sort == Sort.INT) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (int)expremd0.postConst - (int)expremd1.postConst; + } else { + binaryemd.preConst = Math.subtractExact((int)expremd0.postConst, (int)expremd1.postConst); + } + } else if (sort == Sort.LONG) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (long)expremd0.postConst - (long)expremd1.postConst; + } else { + binaryemd.preConst = Math.subtractExact((long)expremd0.postConst, (long)expremd1.postConst); + } + } else if (sort == Sort.FLOAT) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (float)expremd0.postConst - (float)expremd1.postConst; + } else { + binaryemd.preConst = Utility.subtractWithoutOverflow((float)expremd0.postConst, (float)expremd1.postConst); + } + } else if (sort == Sort.DOUBLE) { + if (settings.getNumericOverflow()) { + binaryemd.preConst = (double)expremd0.postConst - (double)expremd1.postConst; + } else { + binaryemd.preConst = Utility.subtractWithoutOverflow((double)expremd0.postConst, (double)expremd1.postConst); + } + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + } else if (ctx.LSH() != null) { + if (sort == Sort.INT) { + binaryemd.preConst = 
(int)expremd0.postConst << (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + binaryemd.preConst = (long)expremd0.postConst << (long)expremd1.postConst; + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + } else if (ctx.RSH() != null) { + if (sort == Sort.INT) { + binaryemd.preConst = (int)expremd0.postConst >> (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + binaryemd.preConst = (long)expremd0.postConst >> (long)expremd1.postConst; + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + } else if (ctx.USH() != null) { + if (sort == Sort.INT) { + binaryemd.preConst = (int)expremd0.postConst >>> (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + binaryemd.preConst = (long)expremd0.postConst >>> (long)expremd1.postConst; + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + } else if (ctx.BWAND() != null) { + if (sort == Sort.INT) { + binaryemd.preConst = (int)expremd0.postConst & (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + binaryemd.preConst = (long)expremd0.postConst & (long)expremd1.postConst; + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + } else if (ctx.BWXOR() != null) { + if (sort == Sort.BOOL) { + binaryemd.preConst = (boolean)expremd0.postConst ^ (boolean)expremd1.postConst; + } else if (sort == Sort.INT) { + binaryemd.preConst = (int)expremd0.postConst ^ (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + binaryemd.preConst = (long)expremd0.postConst ^ (long)expremd1.postConst; + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + } else if (ctx.BWOR() != null) { + if (sort == Sort.INT) { + binaryemd.preConst = (int)expremd0.postConst | (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + binaryemd.preConst = (long)expremd0.postConst | (long)expremd1.postConst; + } else { + throw new 
IllegalStateException(error(ctx) + "Unexpected parser state."); + } + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + } + + binaryemd.from = promote; + binaryemd.typesafe = expremd0.typesafe && expremd1.typesafe; + + return null; + } + + @Override + public Void visitComp(final CompContext ctx) { + final ExpressionMetadata compemd = adapter.getExpressionMetadata(ctx); + final boolean equality = ctx.EQ() != null || ctx.NE() != null; + final boolean reference = ctx.EQR() != null || ctx.NER() != null; + + final ExpressionContext exprctx0 = adapter.updateExpressionTree(ctx.expression(0)); + final ExpressionMetadata expremd0 = adapter.createExpressionMetadata(exprctx0); + visit(exprctx0); + + final ExpressionContext exprctx1 = adapter.updateExpressionTree(ctx.expression(1)); + final ExpressionMetadata expremd1 = adapter.createExpressionMetadata(exprctx1); + visit(exprctx1); + + if (expremd0.isNull && expremd1.isNull) { + throw new IllegalArgumentException(error(ctx) + "Unnecessary comparison of null constants."); + } + + final Type promote = equality ? promoteEquality(expremd0.from, expremd1.from) : + reference ? 
promoteReference(expremd0.from, expremd1.from) : + promoteNumeric(expremd0.from, expremd1.from, true, true); + + if (promote == null) { + throw new ClassCastException("Cannot apply [" + ctx.getChild(1).getText() + "] " + + "operation to types [" + expremd0.from.name + "] and [" + expremd1.from.name + "]."); + } + + expremd0.to = promote; + expremd1.to = promote; + markCast(expremd0); + markCast(expremd1); + + if (expremd0.postConst != null && expremd1.postConst != null) { + final Sort sort = promote.sort; + + if (ctx.EQ() != null || ctx.EQR() != null) { + if (sort == Sort.BOOL) { + compemd.preConst = (boolean)expremd0.postConst == (boolean)expremd1.postConst; + } else if (sort == Sort.INT) { + compemd.preConst = (int)expremd0.postConst == (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + compemd.preConst = (long)expremd0.postConst == (long)expremd1.postConst; + } else if (sort == Sort.FLOAT) { + compemd.preConst = (float)expremd0.postConst == (float)expremd1.postConst; + } else if (sort == Sort.DOUBLE) { + compemd.preConst = (double)expremd0.postConst == (double)expremd1.postConst; + } else { + if (ctx.EQ() != null && !expremd0.isNull && !expremd1.isNull) { + compemd.preConst = expremd0.postConst.equals(expremd1.postConst); + } else if (ctx.EQR() != null) { + compemd.preConst = expremd0.postConst == expremd1.postConst; + } + } + } else if (ctx.NE() != null || ctx.NER() != null) { + if (sort == Sort.BOOL) { + compemd.preConst = (boolean)expremd0.postConst != (boolean)expremd1.postConst; + } else if (sort == Sort.INT) { + compemd.preConst = (int)expremd0.postConst != (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + compemd.preConst = (long)expremd0.postConst != (long)expremd1.postConst; + } else if (sort == Sort.FLOAT) { + compemd.preConst = (float)expremd0.postConst != (float)expremd1.postConst; + } else if (sort == Sort.DOUBLE) { + compemd.preConst = (double)expremd0.postConst != (double)expremd1.postConst; + } else { + if (ctx.NE() != null 
&& !expremd0.isNull && !expremd1.isNull) { + compemd.preConst = expremd0.postConst.equals(expremd1.postConst); + } else if (ctx.NER() != null) { + compemd.preConst = expremd0.postConst == expremd1.postConst; + } + } + } else if (ctx.GTE() != null) { + if (sort == Sort.INT) { + compemd.preConst = (int)expremd0.postConst >= (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + compemd.preConst = (long)expremd0.postConst >= (long)expremd1.postConst; + } else if (sort == Sort.FLOAT) { + compemd.preConst = (float)expremd0.postConst >= (float)expremd1.postConst; + } else if (sort == Sort.DOUBLE) { + compemd.preConst = (double)expremd0.postConst >= (double)expremd1.postConst; + } + } else if (ctx.GT() != null) { + if (sort == Sort.INT) { + compemd.preConst = (int)expremd0.postConst > (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + compemd.preConst = (long)expremd0.postConst > (long)expremd1.postConst; + } else if (sort == Sort.FLOAT) { + compemd.preConst = (float)expremd0.postConst > (float)expremd1.postConst; + } else if (sort == Sort.DOUBLE) { + compemd.preConst = (double)expremd0.postConst > (double)expremd1.postConst; + } + } else if (ctx.LTE() != null) { + if (sort == Sort.INT) { + compemd.preConst = (int)expremd0.postConst <= (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + compemd.preConst = (long)expremd0.postConst <= (long)expremd1.postConst; + } else if (sort == Sort.FLOAT) { + compemd.preConst = (float)expremd0.postConst <= (float)expremd1.postConst; + } else if (sort == Sort.DOUBLE) { + compemd.preConst = (double)expremd0.postConst <= (double)expremd1.postConst; + } + } else if (ctx.LT() != null) { + if (sort == Sort.INT) { + compemd.preConst = (int)expremd0.postConst < (int)expremd1.postConst; + } else if (sort == Sort.LONG) { + compemd.preConst = (long)expremd0.postConst < (long)expremd1.postConst; + } else if (sort == Sort.FLOAT) { + compemd.preConst = (float)expremd0.postConst < (float)expremd1.postConst; + } else if (sort 
== Sort.DOUBLE) { + compemd.preConst = (double)expremd0.postConst < (double)expremd1.postConst; + } + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + } + + compemd.from = definition.booleanType; + compemd.typesafe = expremd0.typesafe && expremd1.typesafe; + + return null; + } + + @Override + public Void visitBool(final BoolContext ctx) { + final ExpressionMetadata boolemd = adapter.getExpressionMetadata(ctx); + + final ExpressionContext exprctx0 = adapter.updateExpressionTree(ctx.expression(0)); + final ExpressionMetadata expremd0 = adapter.createExpressionMetadata(exprctx0); + expremd0.to = definition.booleanType; + visit(exprctx0); + markCast(expremd0); + + final ExpressionContext exprctx1 = adapter.updateExpressionTree(ctx.expression(1)); + final ExpressionMetadata expremd1 = adapter.createExpressionMetadata(exprctx1); + expremd1.to = definition.booleanType; + visit(exprctx1); + markCast(expremd1); + + if (expremd0.postConst != null && expremd1.postConst != null) { + if (ctx.BOOLAND() != null) { + boolemd.preConst = (boolean)expremd0.postConst && (boolean)expremd1.postConst; + } else if (ctx.BOOLOR() != null) { + boolemd.preConst = (boolean)expremd0.postConst || (boolean)expremd1.postConst; + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + } + + boolemd.from = definition.booleanType; + boolemd.typesafe = expremd0.typesafe && expremd1.typesafe; + + return null; + } + + @Override + public Void visitConditional(final ConditionalContext ctx) { + final ExpressionMetadata condemd = adapter.getExpressionMetadata(ctx); + + final ExpressionContext exprctx0 = adapter.updateExpressionTree(ctx.expression(0)); + final ExpressionMetadata expremd0 = adapter.createExpressionMetadata(exprctx0); + expremd0.to = definition.booleanType; + visit(exprctx0); + markCast(expremd0); + + if (expremd0.postConst != null) { + throw new IllegalArgumentException(error(ctx) + "Unnecessary conditional 
statement."); + } + + final ExpressionContext exprctx1 = adapter.updateExpressionTree(ctx.expression(1)); + final ExpressionMetadata expremd1 = adapter.createExpressionMetadata(exprctx1); + expremd1.to = condemd.to; + expremd1.explicit = condemd.explicit; + visit(exprctx1); + + final ExpressionContext exprctx2 = adapter.updateExpressionTree(ctx.expression(2)); + final ExpressionMetadata expremd2 = adapter.createExpressionMetadata(exprctx2); + expremd2.to = condemd.to; + expremd2.explicit = condemd.explicit; + visit(exprctx2); + + if (condemd.to == null) { + final Type promote = promoteConditional(expremd1.from, expremd2.from, expremd1.preConst, expremd2.preConst); + + expremd1.to = promote; + expremd2.to = promote; + condemd.from = promote; + } else { + condemd.from = condemd.to; + } + + markCast(expremd1); + markCast(expremd2); + + condemd.typesafe = expremd0.typesafe && expremd1.typesafe; + + return null; + } + + @Override + public Void visitAssignment(final AssignmentContext ctx) { + final ExpressionMetadata assignemd = adapter.getExpressionMetadata(ctx); + + final ExtstartContext extstartctx = ctx.extstart(); + final ExternalMetadata extstartemd = adapter.createExternalMetadata(extstartctx); + + extstartemd.read = assignemd.read; + extstartemd.storeExpr = adapter.updateExpressionTree(ctx.expression()); + + if (ctx.AMUL() != null) { + extstartemd.token = MUL; + } else if (ctx.ADIV() != null) { + extstartemd.token = DIV; + } else if (ctx.AREM() != null) { + extstartemd.token = REM; + } else if (ctx.AADD() != null) { + extstartemd.token = ADD; + } else if (ctx.ASUB() != null) { + extstartemd.token = SUB; + } else if (ctx.ALSH() != null) { + extstartemd.token = LSH; + } else if (ctx.AUSH() != null) { + extstartemd.token = USH; + } else if (ctx.ARSH() != null) { + extstartemd.token = RSH; + } else if (ctx.AAND() != null) { + extstartemd.token = BWAND; + } else if (ctx.AXOR() != null) { + extstartemd.token = BWXOR; + } else if (ctx.AOR() != null) { + 
extstartemd.token = BWOR; + } + + visit(extstartctx); + + assignemd.statement = true; + assignemd.from = extstartemd.read ? extstartemd.current : definition.voidType; + assignemd.typesafe = extstartemd.current.sort != Sort.DEF; + + return null; + } + + @Override + public Void visitExtstart(final ExtstartContext ctx) { + final ExtprecContext precctx = ctx.extprec(); + final ExtcastContext castctx = ctx.extcast(); + final ExttypeContext typectx = ctx.exttype(); + final ExtvarContext varctx = ctx.extvar(); + final ExtnewContext newctx = ctx.extnew(); + final ExtstringContext stringctx = ctx.extstring(); + + if (precctx != null) { + adapter.createExtNodeMetadata(ctx, precctx); + visit(precctx); + } else if (castctx != null) { + adapter.createExtNodeMetadata(ctx, castctx); + visit(castctx); + } else if (typectx != null) { + adapter.createExtNodeMetadata(ctx, typectx); + visit(typectx); + } else if (varctx != null) { + adapter.createExtNodeMetadata(ctx, varctx); + visit(varctx); + } else if (newctx != null) { + adapter.createExtNodeMetadata(ctx, newctx); + visit(newctx); + } else if (stringctx != null) { + adapter.createExtNodeMetadata(ctx, stringctx); + visit(stringctx); + } else { + throw new IllegalStateException(); + } + + return null; + } + + @Override + public Void visitExtprec(final ExtprecContext ctx) { + final ExtNodeMetadata precenmd = adapter.getExtNodeMetadata(ctx); + final ParserRuleContext parent = precenmd.parent; + final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + + final ExtprecContext precctx = ctx.extprec(); + final ExtcastContext castctx = ctx.extcast(); + final ExttypeContext typectx = ctx.exttype(); + final ExtvarContext varctx = ctx.extvar(); + final ExtnewContext newctx = ctx.extnew(); + final ExtstringContext stringctx = ctx.extstring(); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + if (dotctx != null || bracectx != null) { + ++parentemd.scope; + } + + if (precctx != 
null) { + adapter.createExtNodeMetadata(parent, precctx); + visit(precctx); + } else if (castctx != null) { + adapter.createExtNodeMetadata(parent, castctx); + visit(castctx); + } else if (typectx != null) { + adapter.createExtNodeMetadata(parent, typectx); + visit(typectx); + } else if (varctx != null) { + adapter.createExtNodeMetadata(parent, varctx); + visit(varctx); + } else if (newctx != null) { + adapter.createExtNodeMetadata(parent, newctx); + visit(newctx); + } else if (stringctx != null) { + adapter.createExtNodeMetadata(ctx, stringctx); + visit(stringctx); + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + + parentemd.statement = false; + + if (dotctx != null) { + --parentemd.scope; + + adapter.createExtNodeMetadata(parent, dotctx); + visit(dotctx); + } else if (bracectx != null) { + --parentemd.scope; + + adapter.createExtNodeMetadata(parent, bracectx); + visit(bracectx); + } + + return null; + } + + @Override + public Void visitExtcast(final ExtcastContext ctx) { + final ExtNodeMetadata castenmd = adapter.getExtNodeMetadata(ctx); + final ParserRuleContext parent = castenmd.parent; + final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + + final ExtprecContext precctx = ctx.extprec(); + final ExtcastContext castctx = ctx.extcast(); + final ExttypeContext typectx = ctx.exttype(); + final ExtvarContext varctx = ctx.extvar(); + final ExtnewContext newctx = ctx.extnew(); + final ExtstringContext stringctx = ctx.extstring(); + + if (precctx != null) { + adapter.createExtNodeMetadata(parent, precctx); + visit(precctx); + } else if (castctx != null) { + adapter.createExtNodeMetadata(parent, castctx); + visit(castctx); + } else if (typectx != null) { + adapter.createExtNodeMetadata(parent, typectx); + visit(typectx); + } else if (varctx != null) { + adapter.createExtNodeMetadata(parent, varctx); + visit(varctx); + } else if (newctx != null) { + adapter.createExtNodeMetadata(parent, newctx); + 
visit(newctx); + } else if (stringctx != null) { + adapter.createExtNodeMetadata(ctx, stringctx); + visit(stringctx); + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + + final DecltypeContext declctx = ctx.decltype(); + final ExpressionMetadata declemd = adapter.createExpressionMetadata(declctx); + visit(declctx); + + castenmd.castTo = getLegalCast(ctx, parentemd.current, declemd.from, true); + castenmd.type = declemd.from; + parentemd.current = declemd.from; + parentemd.statement = false; + + return null; + } + + @Override + public Void visitExtbrace(final ExtbraceContext ctx) { + final ExtNodeMetadata braceenmd = adapter.getExtNodeMetadata(ctx); + final ParserRuleContext parent = braceenmd.parent; + final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + + final boolean array = parentemd.current.sort == Sort.ARRAY; + final boolean def = parentemd.current.sort == Sort.DEF; + boolean map = false; + boolean list = false; + + try { + parentemd.current.clazz.asSubclass(Map.class); + map = true; + } catch (ClassCastException exception) { + // Do nothing. + } + + try { + parentemd.current.clazz.asSubclass(List.class); + list = true; + } catch (ClassCastException exception) { + // Do nothing. + } + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + braceenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; + + final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + + if (array || def) { + expremd.to = array ? definition.intType : definition.objectType; + visit(exprctx); + markCast(expremd); + + braceenmd.target = "#brace"; + braceenmd.type = def ? 
definition.defType : + definition.getType(parentemd.current.struct, parentemd.current.type.getDimensions() - 1); + analyzeLoadStoreExternal(ctx); + parentemd.current = braceenmd.type; + + if (dotctx != null) { + adapter.createExtNodeMetadata(parent, dotctx); + visit(dotctx); + } else if (bracectx != null) { + adapter.createExtNodeMetadata(parent, bracectx); + visit(bracectx); + } + } else { + final boolean store = braceenmd.last && parentemd.storeExpr != null; + final boolean get = parentemd.read || parentemd.token > 0 || !braceenmd.last; + final boolean set = braceenmd.last && store; + + Method getter; + Method setter; + Type valuetype; + Type settype; + + if (map) { + getter = parentemd.current.struct.methods.get("get"); + setter = parentemd.current.struct.methods.get("put"); + + if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1)) { + throw new IllegalArgumentException(error(ctx) + + "Illegal map get shortcut for type [" + parentemd.current.name + "]."); + } + + if (setter != null && setter.arguments.size() != 2) { + throw new IllegalArgumentException(error(ctx) + + "Illegal map set shortcut for type [" + parentemd.current.name + "]."); + } + + if (getter != null && setter != null && (!getter.arguments.get(0).equals(setter.arguments.get(0)) + || !getter.rtn.equals(setter.arguments.get(1)))) { + throw new IllegalArgumentException(error(ctx) + "Shortcut argument types must match."); + } + + valuetype = setter != null ? setter.arguments.get(0) : getter != null ? getter.arguments.get(0) : null; + settype = setter == null ? 
null : setter.arguments.get(1); + } else if (list) { + getter = parentemd.current.struct.methods.get("get"); + setter = parentemd.current.struct.methods.get("add"); + + if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 || + getter.arguments.get(0).sort != Sort.INT)) { + throw new IllegalArgumentException(error(ctx) + + "Illegal list get shortcut for type [" + parentemd.current.name + "]."); + } + + if (setter != null && (setter.arguments.size() != 2 || setter.arguments.get(0).sort != Sort.INT)) { + throw new IllegalArgumentException(error(ctx) + + "Illegal list set shortcut for type [" + parentemd.current.name + "]."); + } + + if (getter != null && setter != null && (!getter.arguments.get(0).equals(setter.arguments.get(0)) + || !getter.rtn.equals(setter.arguments.get(1)))) { + throw new IllegalArgumentException(error(ctx) + "Shortcut argument types must match."); + } + + valuetype = definition.intType; + settype = setter == null ? null : setter.arguments.get(1); + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + + if ((get || set) && (!get || getter != null) && (!set || setter != null)) { + expremd.to = valuetype; + visit(exprctx); + markCast(expremd); + + braceenmd.target = new Object[] {getter, setter, true, null}; + braceenmd.type = get ? getter.rtn : settype; + analyzeLoadStoreExternal(ctx); + parentemd.current = get ? 
getter.rtn : setter.rtn; + } + } + + if (braceenmd.target == null) { + throw new IllegalArgumentException(error(ctx) + + "Attempting to address a non-array type [" + parentemd.current.name + "] as an array."); + } + + return null; + } + + @Override + public Void visitExtdot(final ExtdotContext ctx) { + final ExtNodeMetadata dotemnd = adapter.getExtNodeMetadata(ctx); + final ParserRuleContext parent = dotemnd.parent; + + final ExtcallContext callctx = ctx.extcall(); + final ExtfieldContext fieldctx = ctx.extfield(); + + if (callctx != null) { + adapter.createExtNodeMetadata(parent, callctx); + visit(callctx); + } else if (fieldctx != null) { + adapter.createExtNodeMetadata(parent, fieldctx); + visit(fieldctx); + } + + return null; + } + + @Override + public Void visitExttype(final ExttypeContext ctx) { + final ExtNodeMetadata typeenmd = adapter.getExtNodeMetadata(ctx); + final ParserRuleContext parent = typeenmd.parent; + final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + + if (parentemd.current != null) { + throw new IllegalArgumentException(error(ctx) + "Unexpected static type."); + } + + final String typestr = ctx.TYPE().getText(); + typeenmd.type = definition.getType(typestr); + parentemd.current = typeenmd.type; + parentemd.statik = true; + + final ExtdotContext dotctx = ctx.extdot(); + adapter.createExtNodeMetadata(parent, dotctx); + visit(dotctx); + + return null; + } + + @Override + public Void visitExtcall(final ExtcallContext ctx) { + final ExtNodeMetadata callenmd = adapter.getExtNodeMetadata(ctx); + final ParserRuleContext parent = callenmd.parent; + final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + callenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; + + final String name = ctx.EXTID().getText(); + + if (parentemd.current.sort == Sort.ARRAY) { + throw new 
IllegalArgumentException(error(ctx) + "Unexpected call [" + name + "] on an array."); + } else if (callenmd.last && parentemd.storeExpr != null) { + throw new IllegalArgumentException(error(ctx) + "Cannot assign a value to a call [" + name + "]."); + } + + final Struct struct = parentemd.current.struct; + final List arguments = ctx.arguments().expression(); + final int size = arguments.size(); + Type[] types; + + final Method method = parentemd.statik ? struct.functions.get(name) : struct.methods.get(name); + final boolean def = parentemd.current.sort == Sort.DEF; + + if (method == null && !def) { + throw new IllegalArgumentException( + error(ctx) + "Unknown call [" + name + "] on type [" + struct.name + "]."); + } else if (method != null) { + types = new Type[method.arguments.size()]; + method.arguments.toArray(types); + + callenmd.target = method; + callenmd.type = method.rtn; + parentemd.statement = !parentemd.read && callenmd.last; + parentemd.current = method.rtn; + + if (size != types.length) { + throw new IllegalArgumentException(error(ctx) + "When calling [" + name + "] on type " + + "[" + struct.name + "] expected [" + types.length + "] arguments," + + " but found [" + arguments.size() + "]."); + } + } else { + types = new Type[arguments.size()]; + Arrays.fill(types, definition.defType); + + callenmd.target = name; + callenmd.type = definition.defType; + parentemd.statement = !parentemd.read && callenmd.last; + parentemd.current = callenmd.type; + } + + for (int argument = 0; argument < size; ++argument) { + final ExpressionContext exprctx = adapter.updateExpressionTree(arguments.get(argument)); + final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + expremd.to = types[argument]; + visit(exprctx); + markCast(expremd); + } + + parentemd.statik = false; + + if (dotctx != null) { + adapter.createExtNodeMetadata(parent, dotctx); + visit(dotctx); + } else if (bracectx != null) { + adapter.createExtNodeMetadata(parent, bracectx); + 
visit(bracectx); + } + + return null; + } + + @Override + public Void visitExtvar(final ExtvarContext ctx) { + final ExtNodeMetadata varenmd = adapter.getExtNodeMetadata(ctx); + final ParserRuleContext parent = varenmd.parent; + final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + + final String name = ctx.ID().getText(); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + if (parentemd.current != null) { + throw new IllegalStateException(error(ctx) + "Unexpected variable [" + name + "] load."); + } + + varenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; + + final Variable variable = getVariable(name); + + if (variable == null) { + throw new IllegalArgumentException(error(ctx) + "Unknown variable [" + name + "]."); + } + + varenmd.target = variable.slot; + varenmd.type = variable.type; + analyzeLoadStoreExternal(ctx); + parentemd.current = varenmd.type; + + if (dotctx != null) { + adapter.createExtNodeMetadata(parent, dotctx); + visit(dotctx); + } else if (bracectx != null) { + adapter.createExtNodeMetadata(parent, bracectx); + visit(bracectx); + } + + return null; + } + + @Override + public Void visitExtfield(final ExtfieldContext ctx) { + final ExtNodeMetadata memberenmd = adapter.getExtNodeMetadata(ctx); + final ParserRuleContext parent = memberenmd.parent; + final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + + if (ctx.EXTID() == null && ctx.EXTINTEGER() == null) { + throw new IllegalArgumentException(error(ctx) + "Unexpected parser state."); + } + + final String value = ctx.EXTID() == null ? 
ctx.EXTINTEGER().getText() : ctx.EXTID().getText(); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + memberenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; + final boolean store = memberenmd.last && parentemd.storeExpr != null; + + if (parentemd.current == null) { + throw new IllegalStateException(error(ctx) + "Unexpected field [" + value + "] load."); + } + + if (parentemd.current.sort == Sort.ARRAY) { + if ("length".equals(value)) { + if (!parentemd.read) { + throw new IllegalArgumentException(error(ctx) + "Must read array field [length]."); + } else if (store) { + throw new IllegalArgumentException( + error(ctx) + "Cannot write to read-only array field [length]."); + } + + memberenmd.target = "#length"; + memberenmd.type = definition.intType; + parentemd.current = definition.intType; + } else { + throw new IllegalArgumentException(error(ctx) + "Unexpected array field [" + value + "]."); + } + } else if (parentemd.current.sort == Sort.DEF) { + memberenmd.target = value; + memberenmd.type = definition.defType; + analyzeLoadStoreExternal(ctx); + parentemd.current = memberenmd.type; + } else { + final Struct struct = parentemd.current.struct; + final Field field = parentemd.statik ? 
struct.statics.get(value) : struct.members.get(value); + + if (field != null) { + if (store && java.lang.reflect.Modifier.isFinal(field.reflect.getModifiers())) { + throw new IllegalArgumentException(error(ctx) + "Cannot write to read-only" + + " field [" + value + "] for type [" + struct.name + "]."); + } + + memberenmd.target = field; + memberenmd.type = field.type; + analyzeLoadStoreExternal(ctx); + parentemd.current = memberenmd.type; + } else { + final boolean get = parentemd.read || parentemd.token > 0 || !memberenmd.last; + final boolean set = memberenmd.last && store; + + Method getter = struct.methods.get("get" + Character.toUpperCase(value.charAt(0)) + value.substring(1)); + Method setter = struct.methods.get("set" + Character.toUpperCase(value.charAt(0)) + value.substring(1)); + Object constant = null; + + if (getter != null && (getter.rtn.sort == Sort.VOID || !getter.arguments.isEmpty())) { + throw new IllegalArgumentException(error(ctx) + + "Illegal get shortcut on field [" + value + "] for type [" + struct.name + "]."); + } + + if (setter != null && (setter.rtn.sort != Sort.VOID || setter.arguments.size() != 1)) { + throw new IllegalArgumentException(error(ctx) + + "Illegal set shortcut on field [" + value + "] for type [" + struct.name + "]."); + } + + Type settype = setter == null ? 
null : setter.arguments.get(0); + + if (getter == null && setter == null) { + if (ctx.EXTID() != null) { + try { + parentemd.current.clazz.asSubclass(Map.class); + + getter = parentemd.current.struct.methods.get("get"); + setter = parentemd.current.struct.methods.get("put"); + + if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 || + getter.arguments.get(0).sort != Sort.STRING)) { + throw new IllegalArgumentException(error(ctx) + + "Illegal map get shortcut [" + value + "] for type [" + struct.name + "]."); + } + + if (setter != null && (setter.arguments.size() != 2 || + setter.arguments.get(0).sort != Sort.STRING)) { + throw new IllegalArgumentException(error(ctx) + + "Illegal map set shortcut [" + value + "] for type [" + struct.name + "]."); + } + + if (getter != null && setter != null && !getter.rtn.equals(setter.arguments.get(1))) { + throw new IllegalArgumentException(error(ctx) + "Shortcut argument types must match."); + } + + settype = setter == null ? null : setter.arguments.get(1); + constant = value; + } catch (ClassCastException exception) { + //Do nothing. 
+ } + } else if (ctx.EXTINTEGER() != null) { + try { + parentemd.current.clazz.asSubclass(List.class); + + getter = parentemd.current.struct.methods.get("get"); + setter = parentemd.current.struct.methods.get("add"); + + if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 || + getter.arguments.get(0).sort != Sort.INT)) { + throw new IllegalArgumentException(error(ctx) + + "Illegal list get shortcut [" + value + "] for type [" + struct.name + "]."); + } + + if (setter != null && (setter.rtn.sort != Sort.VOID || setter.arguments.size() != 2 || + setter.arguments.get(0).sort != Sort.INT)) { + throw new IllegalArgumentException(error(ctx) + + "Illegal list add shortcut [" + value + "] for type [" + struct.name + "]."); + } + + if (getter != null && setter != null && !getter.rtn.equals(setter.arguments.get(1))) { + throw new IllegalArgumentException(error(ctx) + "Shortcut argument types must match."); + } + + settype = setter == null ? null : setter.arguments.get(1); + + try { + constant = Integer.parseInt(value); + } catch (NumberFormatException exception) { + throw new IllegalArgumentException(error(ctx) + + "Illegal list shortcut value [" + value + "]."); + } + } catch (ClassCastException exception) { + //Do nothing. + } + } else { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + } + + if ((get || set) && (!get || getter != null) && (!set || setter != null)) { + memberenmd.target = new Object[] {getter, setter, constant != null, constant}; + memberenmd.type = get ? getter.rtn : settype; + analyzeLoadStoreExternal(ctx); + parentemd.current = get ? 
getter.rtn : setter.rtn; + } + } + + if (memberenmd.target == null) { + throw new IllegalArgumentException( + error(ctx) + "Unknown field [" + value + "] for type [" + struct.name + "]."); + } + } + + parentemd.statik = false; + + if (dotctx != null) { + adapter.createExtNodeMetadata(parent, dotctx); + visit(dotctx); + } else if (bracectx != null) { + adapter.createExtNodeMetadata(parent, bracectx); + visit(bracectx); + } + + return null; + } + + @Override + public Void visitExtnew(ExtnewContext ctx) { + final ExtNodeMetadata newenmd = adapter.getExtNodeMetadata(ctx); + final ParserRuleContext parent = newenmd.parent; + final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + newenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; + + final String name = ctx.TYPE().getText(); + final Struct struct = definition.structs.get(name); + + if (parentemd.current != null) { + throw new IllegalArgumentException(error(ctx) + "Unexpected new call."); + } else if (struct == null) { + throw new IllegalArgumentException(error(ctx) + "Specified type [" + name + "] not found."); + } else if (newenmd.last && parentemd.storeExpr != null) { + throw new IllegalArgumentException(error(ctx) + "Cannot assign a value to a new call."); + } + + final boolean newclass = ctx.arguments() != null; + final boolean newarray = !ctx.expression().isEmpty(); + + final List arguments = newclass ? 
ctx.arguments().expression() : ctx.expression(); + final int size = arguments.size(); + + Type[] types; + + if (newarray) { + if (!parentemd.read) { + throw new IllegalArgumentException(error(ctx) + "A newly created array must be assigned."); + } + + types = new Type[size]; + Arrays.fill(types, definition.intType); + + newenmd.target = "#makearray"; + + if (size > 1) { + newenmd.type = definition.getType(struct, size); + parentemd.current = newenmd.type; + } else if (size == 1) { + newenmd.type = definition.getType(struct, 0); + parentemd.current = definition.getType(struct, 1); + } else { + throw new IllegalArgumentException(error(ctx) + "A newly created array cannot have zero dimensions."); + } + } else if (newclass) { + final Constructor constructor = struct.constructors.get("new"); + + if (constructor != null) { + types = new Type[constructor.arguments.size()]; + constructor.arguments.toArray(types); + + newenmd.target = constructor; + newenmd.type = definition.getType(struct, 0); + parentemd.statement = !parentemd.read && newenmd.last; + parentemd.current = newenmd.type; + } else { + throw new IllegalArgumentException( + error(ctx) + "Unknown new call on type [" + struct.name + "]."); + } + } else { + throw new IllegalArgumentException(error(ctx) + "Unknown parser state."); + } + + if (size != types.length) { + throw new IllegalArgumentException(error(ctx) + "When calling [" + name + "] on type " + + "[" + struct.name + "] expected [" + types.length + "] arguments," + + " but found [" + arguments.size() + "]."); + } + + for (int argument = 0; argument < size; ++argument) { + final ExpressionContext exprctx = adapter.updateExpressionTree(arguments.get(argument)); + final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + expremd.to = types[argument]; + visit(exprctx); + markCast(expremd); + } + + if (dotctx != null) { + adapter.createExtNodeMetadata(parent, dotctx); + visit(dotctx); + } else if (bracectx != null) { + 
adapter.createExtNodeMetadata(parent, bracectx); + visit(bracectx); + } + + return null; + } + + @Override + public Void visitExtstring(final ExtstringContext ctx) { + final ExtNodeMetadata memberenmd = adapter.getExtNodeMetadata(ctx); + final ParserRuleContext parent = memberenmd.parent; + final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + + final String string = ctx.STRING().getText(); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + memberenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; + final boolean store = memberenmd.last && parentemd.storeExpr != null; + + if (parentemd.current != null) { + throw new IllegalStateException(error(ctx) + "Unexpected String constant [" + string + "]."); + } + + if (!parentemd.read) { + throw new IllegalArgumentException(error(ctx) + "Must read String constant [" + string + "]."); + } else if (store) { + throw new IllegalArgumentException( + error(ctx) + "Cannot write to read-only String constant [" + string + "]."); + } + + memberenmd.target = string; + memberenmd.type = definition.stringType; + parentemd.current = definition.stringType; + + if (memberenmd.last) { + parentemd.constant = string; + } + + if (dotctx != null) { + adapter.createExtNodeMetadata(parent, dotctx); + visit(dotctx); + } else if (bracectx != null) { + adapter.createExtNodeMetadata(parent, bracectx); + visit(bracectx); + } + + return null; + } + + @Override + public Void visitArguments(final ArgumentsContext ctx) { + throw new UnsupportedOperationException(error(ctx) + "Unexpected parser state."); + } + + @Override + public Void visitIncrement(IncrementContext ctx) { + final ExpressionMetadata incremd = adapter.getExpressionMetadata(ctx); + final Sort sort = incremd.to == null ? null : incremd.to.sort; + final boolean positive = ctx.INCR() != null; + + if (incremd.to == null) { + incremd.preConst = positive ? 
1 : -1; + incremd.from = definition.intType; + } else { + switch (sort) { + case LONG: + incremd.preConst = positive ? 1L : -1L; + incremd.from = definition.longType; + case FLOAT: + incremd.preConst = positive ? 1.0F : -1.0F; + incremd.from = definition.floatType; + case DOUBLE: + incremd.preConst = positive ? 1.0 : -1.0; + incremd.from = definition.doubleType; + default: + incremd.preConst = positive ? 1 : -1; + incremd.from = definition.intType; + } + } + + return null; + } + + private void analyzeLoadStoreExternal(final ParserRuleContext source) { + final ExtNodeMetadata extenmd = adapter.getExtNodeMetadata(source); + final ParserRuleContext parent = extenmd.parent; + final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + + if (extenmd.last && parentemd.storeExpr != null) { + final ParserRuleContext store = parentemd.storeExpr; + final ExpressionMetadata storeemd = adapter.createExpressionMetadata(parentemd.storeExpr); + final int token = parentemd.token; + + if (token > 0) { + visit(store); + + final boolean add = token == ADD; + final boolean xor = token == BWAND || token == BWXOR || token == BWOR; + final boolean decimal = token == MUL || token == DIV || token == REM || token == SUB; + + extenmd.promote = add ? promoteAdd(extenmd.type, storeemd.from) : + xor ? promoteXor(extenmd.type, storeemd.from) : + promoteNumeric(extenmd.type, storeemd.from, decimal, true); + + if (extenmd.promote == null) { + throw new IllegalArgumentException("Cannot apply compound assignment to " + + " types [" + extenmd.type.name + "] and [" + storeemd.from.name + "]."); + } + + extenmd.castFrom = getLegalCast(source, extenmd.type, extenmd.promote, false); + extenmd.castTo = getLegalCast(source, extenmd.promote, extenmd.type, true); + + storeemd.to = add && extenmd.promote.sort == Sort.STRING ? 
storeemd.from : extenmd.promote; + markCast(storeemd); + } else { + storeemd.to = extenmd.type; + visit(store); + markCast(storeemd); + } + } + } + + private void markCast(final ExpressionMetadata emd) { + if (emd.from == null) { + throw new IllegalStateException(error(emd.source) + "From cast type should never be null."); + } + + if (emd.to != null) { + emd.cast = getLegalCast(emd.source, emd.from, emd.to, emd.explicit || !emd.typesafe); + + if (emd.preConst != null && emd.to.sort.constant) { + emd.postConst = constCast(emd.source, emd.preConst, emd.cast); + } + } else { + throw new IllegalStateException(error(emd.source) + "To cast type should never be null."); + } + } + + private Cast getLegalCast(final ParserRuleContext source, final Type from, final Type to, final boolean explicit) { + final Cast cast = new Cast(from, to); + + if (from.equals(to)) { + return cast; + } + + if (from.sort == Sort.DEF && to.sort != Sort.VOID || from.sort != Sort.VOID && to.sort == Sort.DEF) { + final Transform transform = definition.transforms.get(cast); + + if (transform != null) { + return transform; + } + + return cast; + } + + switch (from.sort) { + case BOOL: + switch (to.sort) { + case OBJECT: + case BOOL_OBJ: + return checkTransform(source, cast); + } + + break; + case BYTE: + switch (to.sort) { + case SHORT: + case INT: + case LONG: + case FLOAT: + case DOUBLE: + return cast; + case CHAR: + if (explicit) + return cast; + + break; + case OBJECT: + case NUMBER: + case BYTE_OBJ: + case SHORT_OBJ: + case INT_OBJ: + case LONG_OBJ: + case FLOAT_OBJ: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case CHAR_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case SHORT: + switch (to.sort) { + case INT: + case LONG: + case FLOAT: + case DOUBLE: + return cast; + case BYTE: + case CHAR: + if (explicit) + return cast; + + break; + case OBJECT: + case NUMBER: + case SHORT_OBJ: + case INT_OBJ: + case LONG_OBJ: + case FLOAT_OBJ: + case 
DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE_OBJ: + case CHAR_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case CHAR: + switch (to.sort) { + case INT: + case LONG: + case FLOAT: + case DOUBLE: + return cast; + case BYTE: + case SHORT: + if (explicit) + return cast; + + break; + case OBJECT: + case NUMBER: + case CHAR_OBJ: + case INT_OBJ: + case LONG_OBJ: + case FLOAT_OBJ: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE_OBJ: + case SHORT_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case INT: + switch (to.sort) { + case LONG: + case FLOAT: + case DOUBLE: + return cast; + case BYTE: + case SHORT: + case CHAR: + if (explicit) + return cast; + + break; + case OBJECT: + case NUMBER: + case INT_OBJ: + case LONG_OBJ: + case FLOAT_OBJ: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE_OBJ: + case SHORT_OBJ: + case CHAR_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case LONG: + switch (to.sort) { + case FLOAT: + case DOUBLE: + return cast; + case BYTE: + case SHORT: + case CHAR: + case INT: + if (explicit) + return cast; + + break; + case OBJECT: + case NUMBER: + case LONG_OBJ: + case FLOAT_OBJ: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE_OBJ: + case SHORT_OBJ: + case CHAR_OBJ: + case INT_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case FLOAT: + switch (to.sort) { + case DOUBLE: + return cast; + case BYTE: + case SHORT: + case CHAR: + case INT: + case LONG: + if (explicit) + return cast; + + break; + case OBJECT: + case NUMBER: + case FLOAT_OBJ: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE_OBJ: + case SHORT_OBJ: + case CHAR_OBJ: + case INT_OBJ: + case LONG_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case DOUBLE: + switch (to.sort) { + case BYTE: + case 
SHORT: + case CHAR: + case INT: + case LONG: + case FLOAT: + if (explicit) + return cast; + + break; + case OBJECT: + case NUMBER: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE_OBJ: + case SHORT_OBJ: + case CHAR_OBJ: + case INT_OBJ: + case LONG_OBJ: + case FLOAT_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case OBJECT: + case NUMBER: + switch (to.sort) { + case BYTE: + case SHORT: + case CHAR: + case INT: + case LONG: + case FLOAT: + case DOUBLE: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case BOOL_OBJ: + switch (to.sort) { + case BOOL: + return checkTransform(source, cast); + } + + break; + case BYTE_OBJ: + switch (to.sort) { + case BYTE: + case SHORT: + case INT: + case LONG: + case FLOAT: + case DOUBLE: + case SHORT_OBJ: + case INT_OBJ: + case LONG_OBJ: + case FLOAT_OBJ: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case CHAR: + case CHAR_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case SHORT_OBJ: + switch (to.sort) { + case SHORT: + case INT: + case LONG: + case FLOAT: + case DOUBLE: + case INT_OBJ: + case LONG_OBJ: + case FLOAT_OBJ: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE: + case CHAR: + case BYTE_OBJ: + case CHAR_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case CHAR_OBJ: + switch (to.sort) { + case CHAR: + case INT: + case LONG: + case FLOAT: + case DOUBLE: + case INT_OBJ: + case LONG_OBJ: + case FLOAT_OBJ: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE: + case SHORT: + case BYTE_OBJ: + case SHORT_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case INT_OBJ: + switch (to.sort) { + case INT: + case LONG: + case FLOAT: + case DOUBLE: + case LONG_OBJ: + case FLOAT_OBJ: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE: + case SHORT: + case CHAR: 
+ case BYTE_OBJ: + case SHORT_OBJ: + case CHAR_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case LONG_OBJ: + switch (to.sort) { + case LONG: + case FLOAT: + case DOUBLE: + case FLOAT_OBJ: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE: + case SHORT: + case CHAR: + case INT: + case BYTE_OBJ: + case SHORT_OBJ: + case CHAR_OBJ: + case INT_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case FLOAT_OBJ: + switch (to.sort) { + case FLOAT: + case DOUBLE: + case DOUBLE_OBJ: + return checkTransform(source, cast); + case BYTE: + case SHORT: + case CHAR: + case INT: + case LONG: + case BYTE_OBJ: + case SHORT_OBJ: + case CHAR_OBJ: + case INT_OBJ: + case LONG_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + case DOUBLE_OBJ: + switch (to.sort) { + case DOUBLE: + return checkTransform(source, cast); + case BYTE: + case SHORT: + case CHAR: + case INT: + case LONG: + case FLOAT: + case BYTE_OBJ: + case SHORT_OBJ: + case CHAR_OBJ: + case INT_OBJ: + case LONG_OBJ: + case FLOAT_OBJ: + if (explicit) + return checkTransform(source, cast); + + break; + } + + break; + } + + try { + from.clazz.asSubclass(to.clazz); + + return cast; + } catch (ClassCastException cce0) { + try { + if (explicit) { + to.clazz.asSubclass(from.clazz); + + return cast; + } else { + throw new ClassCastException( + error(source) + "Cannot cast from [" + from.name + "] to [" + to.name + "]."); + } + } catch (ClassCastException cce1) { + throw new ClassCastException( + error(source) + "Cannot cast from [" + from.name + "] to [" + to.name + "]."); + } + } + } + + private Transform checkTransform(final ParserRuleContext source, final Cast cast) { + final Transform transform = definition.transforms.get(cast); + + if (transform == null) { + throw new ClassCastException( + error(source) + "Cannot cast from [" + cast.from.name + "] to [" + cast.to.name + "]."); + } + + return 
transform; + } + + private Object constCast(final ParserRuleContext source, final Object constant, final Cast cast) { + if (cast instanceof Transform) { + final Transform transform = (Transform)cast; + return invokeTransform(source, transform, constant); + } else { + final Sort fsort = cast.from.sort; + final Sort tsort = cast.to.sort; + + if (fsort == tsort) { + return constant; + } else if (fsort.numeric && tsort.numeric) { + Number number; + + if (fsort == Sort.CHAR) { + number = (int)(char)constant; + } else { + number = (Number)constant; + } + + switch (tsort) { + case BYTE: return number.byteValue(); + case SHORT: return number.shortValue(); + case CHAR: return (char)number.intValue(); + case INT: return number.intValue(); + case LONG: return number.longValue(); + case FLOAT: return number.floatValue(); + case DOUBLE: return number.doubleValue(); + default: + throw new IllegalStateException(error(source) + "Expected numeric type for cast."); + } + } else { + throw new IllegalStateException(error(source) + "No valid constant cast from " + + "[" + cast.from.clazz.getCanonicalName() + "] to " + + "[" + cast.to.clazz.getCanonicalName() + "]."); + } + } + } + + private Object invokeTransform(final ParserRuleContext source, final Transform transform, final Object object) { + final Method method = transform.method; + final java.lang.reflect.Method jmethod = method.reflect; + final int modifiers = jmethod.getModifiers(); + + try { + if (java.lang.reflect.Modifier.isStatic(modifiers)) { + return jmethod.invoke(null, object); + } else { + return jmethod.invoke(object); + } + } catch (IllegalAccessException | IllegalArgumentException | + java.lang.reflect.InvocationTargetException | NullPointerException | + ExceptionInInitializerError exception) { + throw new IllegalStateException(error(source) + "Unable to invoke transform to cast constant from " + + "[" + transform.from.name + "] to [" + transform.to.name + "]."); + } + } + + private Type promoteNumeric(final Type 
from, boolean decimal, boolean primitive) { + final Sort sort = from.sort; + + if (sort == Sort.DEF) { + return definition.defType; + } else if ((sort == Sort.DOUBLE || sort == Sort.DOUBLE_OBJ || sort == Sort.NUMBER) && decimal) { + return primitive ? definition.doubleType : definition.doubleobjType; + } else if ((sort == Sort.FLOAT || sort == Sort.FLOAT_OBJ) && decimal) { + return primitive ? definition.floatType : definition.floatobjType; + } else if (sort == Sort.LONG || sort == Sort.LONG_OBJ || sort == Sort.NUMBER) { + return primitive ? definition.longType : definition.longobjType; + } else if (sort.numeric) { + return primitive ? definition.intType : definition.intobjType; + } + + return null; + } + + private Type promoteNumeric(final Type from0, final Type from1, boolean decimal, boolean primitive) { + final Sort sort0 = from0.sort; + final Sort sort1 = from1.sort; + + if (sort0 == Sort.DEF || sort1 == Sort.DEF) { + return definition.defType; + } + + if (decimal) { + if (sort0 == Sort.DOUBLE || sort0 == Sort.DOUBLE_OBJ || sort0 == Sort.NUMBER || + sort1 == Sort.DOUBLE || sort1 == Sort.DOUBLE_OBJ || sort1 == Sort.NUMBER) { + return primitive ? definition.doubleType : definition.doubleobjType; + } else if (sort0 == Sort.FLOAT || sort0 == Sort.FLOAT_OBJ || sort1 == Sort.FLOAT || sort1 == Sort.FLOAT_OBJ) { + return primitive ? definition.floatType : definition.floatobjType; + } + } + + if (sort0 == Sort.LONG || sort0 == Sort.LONG_OBJ || sort0 == Sort.NUMBER || + sort1 == Sort.LONG || sort1 == Sort.LONG_OBJ || sort1 == Sort.NUMBER) { + return primitive ? definition.longType : definition.longobjType; + } else if (sort0.numeric && sort1.numeric) { + return primitive ? 
definition.intType : definition.intobjType; + } + + return null; + } + + private Type promoteAdd(final Type from0, final Type from1) { + final Sort sort0 = from0.sort; + final Sort sort1 = from1.sort; + + if (sort0 == Sort.STRING || sort1 == Sort.STRING) { + return definition.stringType; + } + + return promoteNumeric(from0, from1, true, true); + } + + private Type promoteXor(final Type from0, final Type from1) { + final Sort sort0 = from0.sort; + final Sort sort1 = from1.sort; + + if (sort0.bool || sort1.bool) { + return definition.booleanType; + } + + return promoteNumeric(from0, from1, false, true); + } + + private Type promoteEquality(final Type from0, final Type from1) { + final Sort sort0 = from0.sort; + final Sort sort1 = from1.sort; + + if (sort0 == Sort.DEF || sort1 == Sort.DEF) { + return definition.defType; + } + + final boolean primitive = sort0.primitive && sort1.primitive; + + if (sort0.bool && sort1.bool) { + return primitive ? definition.booleanType : definition.byteobjType; + } + + if (sort0.numeric && sort1.numeric) { + return promoteNumeric(from0, from1, true, primitive); + } + + return definition.objectType; + } + + private Type promoteReference(final Type from0, final Type from1) { + final Sort sort0 = from0.sort; + final Sort sort1 = from1.sort; + + if (sort0 == Sort.DEF || sort1 == Sort.DEF) { + return definition.defType; + } + + if (sort0.primitive && sort1.primitive) { + if (sort0.bool && sort1.bool) { + return definition.booleanType; + } + + if (sort0.numeric && sort1.numeric) { + return promoteNumeric(from0, from1, true, true); + } + } + + return definition.objectType; + } + + private Type promoteConditional(final Type from0, final Type from1, final Object const0, final Object const1) { + if (from0.equals(from1)) { + return from0; + } + + final Sort sort0 = from0.sort; + final Sort sort1 = from1.sort; + + if (sort0 == Sort.DEF || sort1 == Sort.DEF) { + return definition.defType; + } + + final boolean primitive = sort0.primitive && 
sort1.primitive; + + if (sort0.bool && sort1.bool) { + return primitive ? definition.booleanType : definition.booleanobjType; + } + + if (sort0.numeric && sort1.numeric) { + if (sort0 == Sort.DOUBLE || sort0 == Sort.DOUBLE_OBJ || sort1 == Sort.DOUBLE || sort1 == Sort.DOUBLE_OBJ) { + return primitive ? definition.doubleType : definition.doubleobjType; + } else if (sort0 == Sort.FLOAT || sort0 == Sort.FLOAT_OBJ || sort1 == Sort.FLOAT || sort1 == Sort.FLOAT_OBJ) { + return primitive ? definition.floatType : definition.floatobjType; + } else if (sort0 == Sort.LONG || sort0 == Sort.LONG_OBJ || sort1 == Sort.LONG || sort1 == Sort.LONG_OBJ) { + return sort0.primitive && sort1.primitive ? definition.longType : definition.longobjType; + } else { + if (sort0 == Sort.BYTE || sort0 == Sort.BYTE_OBJ) { + if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { + return primitive ? definition.byteType : definition.byteobjType; + } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) { + if (const1 != null) { + final short constant = (short)const1; + + if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { + return primitive ? definition.byteType : definition.byteobjType; + } + } + + return primitive ? definition.shortType : definition.shortobjType; + } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) { + return primitive ? definition.intType : definition.intobjType; + } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) { + if (const1 != null) { + final int constant = (int)const1; + + if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { + return primitive ? definition.byteType : definition.byteobjType; + } + } + + return primitive ? definition.intType : definition.intobjType; + } + } else if (sort0 == Sort.SHORT || sort0 == Sort.SHORT_OBJ) { + if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { + if (const0 != null) { + final short constant = (short)const0; + + if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { + return primitive ? 
definition.byteType : definition.byteobjType; + } + } + + return primitive ? definition.shortType : definition.shortobjType; + } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) { + return primitive ? definition.shortType : definition.shortobjType; + } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) { + return primitive ? definition.intType : definition.intobjType; + } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) { + if (const1 != null) { + final int constant = (int)const1; + + if (constant <= Short.MAX_VALUE && constant >= Short.MIN_VALUE) { + return primitive ? definition.shortType : definition.shortobjType; + } + } + + return primitive ? definition.intType : definition.intobjType; + } + } else if (sort0 == Sort.CHAR || sort0 == Sort.CHAR_OBJ) { + if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { + return primitive ? definition.intType : definition.intobjType; + } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) { + return primitive ? definition.intType : definition.intobjType; + } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) { + return primitive ? definition.charType : definition.charobjType; + } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) { + if (const1 != null) { + final int constant = (int)const1; + + if (constant <= Character.MAX_VALUE && constant >= Character.MIN_VALUE) { + return primitive ? definition.byteType : definition.byteobjType; + } + } + + return primitive ? definition.intType : definition.intobjType; + } + } else if (sort0 == Sort.INT || sort0 == Sort.INT_OBJ) { + if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { + if (const0 != null) { + final int constant = (int)const0; + + if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { + return primitive ? definition.byteType : definition.byteobjType; + } + } + + return primitive ? 
definition.intType : definition.intobjType; + } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) { + if (const0 != null) { + final int constant = (int)const0; + + if (constant <= Short.MAX_VALUE && constant >= Short.MIN_VALUE) { + return primitive ? definition.byteType : definition.byteobjType; + } + } + + return primitive ? definition.intType : definition.intobjType; + } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) { + if (const0 != null) { + final int constant = (int)const0; + + if (constant <= Character.MAX_VALUE && constant >= Character.MIN_VALUE) { + return primitive ? definition.byteType : definition.byteobjType; + } + } + + return primitive ? definition.intType : definition.intobjType; + } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) { + return primitive ? definition.intType : definition.intobjType; + } + } + } + } + + final Pair pair = new Pair(from0, from1); + final Type bound = definition.bounds.get(pair); + + return bound == null ? definition.objectType : bound; + } +} diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java new file mode 100644 index 00000000000..6f4a23765b5 --- /dev/null +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java @@ -0,0 +1,154 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plan.a; + +import java.net.MalformedURLException; +import java.net.URL; +import java.security.CodeSource; +import java.security.SecureClassLoader; +import java.security.cert.Certificate; + +import org.antlr.v4.runtime.ANTLRInputStream; +import org.antlr.v4.runtime.CommonTokenStream; +import org.antlr.v4.runtime.ParserRuleContext; +import org.elasticsearch.bootstrap.BootstrapInfo; + +final class Compiler { + private static Definition DEFAULT_DEFINITION = new Definition(new Definition()); + + /** we define the class with lowest privileges */ + private static final CodeSource CODESOURCE; + + static { + try { + CODESOURCE = new CodeSource(new URL("file:" + BootstrapInfo.UNTRUSTED_CODEBASE), (Certificate[]) null); + } catch (MalformedURLException impossible) { + throw new RuntimeException(impossible); + } + } + + static class Loader extends SecureClassLoader { + Loader(ClassLoader parent) { + super(parent); + } + + Class define(String name, byte[] bytes) { + return defineClass(name, bytes, 0, bytes.length, CODESOURCE).asSubclass(Executable.class); + } + } + + static Executable compile(Loader loader, final String name, final String source, final Definition custom, CompilerSettings settings) { + long start = System.currentTimeMillis(); + + final Definition definition = custom == null ? 
DEFAULT_DEFINITION : new Definition(custom); + + //long end = System.currentTimeMillis() - start; + //System.out.println("types: " + end); + //start = System.currentTimeMillis(); + + //final ParserRuleContext root = createParseTree(source, types); + final ANTLRInputStream stream = new ANTLRInputStream(source); + final ErrorHandlingLexer lexer = new ErrorHandlingLexer(stream); + final PlanAParser parser = new PlanAParser(new CommonTokenStream(lexer)); + final ParserErrorStrategy strategy = new ParserErrorStrategy(); + + lexer.removeErrorListeners(); + lexer.setTypes(definition.structs.keySet()); + + //List tokens = lexer.getAllTokens(); + + //for (final Token token : tokens) { + // System.out.println(token.getType() + " " + token.getText()); + //} + + parser.removeErrorListeners(); + parser.setErrorHandler(strategy); + + ParserRuleContext root = parser.source(); + + //end = System.currentTimeMillis() - start; + //System.out.println("tree: " + end); + + final Adapter adapter = new Adapter(definition, source, root, settings); + + start = System.currentTimeMillis(); + + Analyzer.analyze(adapter); + //System.out.println(root.toStringTree(parser)); + + //end = System.currentTimeMillis() - start; + //System.out.println("analyze: " + end); + //start = System.currentTimeMillis(); + + final byte[] bytes = Writer.write(adapter); + + //end = System.currentTimeMillis() - start; + //System.out.println("write: " + end); + //start = System.currentTimeMillis(); + + final Executable executable = createExecutable(loader, definition, name, source, bytes); + + //end = System.currentTimeMillis() - start; + //System.out.println("create: " + end); + + return executable; + } + + private static ParserRuleContext createParseTree(String source, Definition definition) { + final ANTLRInputStream stream = new ANTLRInputStream(source); + final ErrorHandlingLexer lexer = new ErrorHandlingLexer(stream); + final PlanAParser parser = new PlanAParser(new CommonTokenStream(lexer)); + final 
ParserErrorStrategy strategy = new ParserErrorStrategy(); + + lexer.removeErrorListeners(); + lexer.setTypes(definition.structs.keySet()); + + parser.removeErrorListeners(); + parser.setErrorHandler(strategy); + + ParserRuleContext root = parser.source(); + // System.out.println(root.toStringTree(parser)); + return root; + } + + private static Executable createExecutable(Loader loader, Definition definition, String name, String source, byte[] bytes) { + try { + // for debugging: + //try { + // FileOutputStream f = new FileOutputStream(new File("/Users/jdconrad/lang/generated/out.class"), false); + // f.write(bytes); + // f.close(); + //} catch (Exception e) { + // throw new RuntimeException(e); + //} + + final Class clazz = loader.define(Writer.CLASS_NAME, bytes); + final java.lang.reflect.Constructor constructor = + clazz.getConstructor(Definition.class, String.class, String.class); + + return constructor.newInstance(definition, name, source); + } catch (Exception exception) { + throw new IllegalStateException( + "An internal error occurred attempting to define the script [" + name + "].", exception); + } + } + + private Compiler() {} +} diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/CompilerSettings.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/CompilerSettings.java new file mode 100644 index 00000000000..f66b65d0612 --- /dev/null +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/CompilerSettings.java @@ -0,0 +1,49 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plan.a; + +/** + * Settings to use when compiling a script + */ +final class CompilerSettings { + + private boolean numericOverflow = true; + + /** + * Returns {@code true} if numeric operations should overflow, {@code false} + * if they should signal an exception. + *

+ * If this value is {@code true} (default), then things behave like java: + * overflow for integer types can result in unexpected values / unexpected + * signs, and overflow for floating point types can result in infinite or + * {@code NaN} values. + */ + public boolean getNumericOverflow() { + return numericOverflow; + } + + /** + * Set {@code true} for numerics to overflow, false to deliver exceptions. + * @see #getNumericOverflow + */ + public void setNumericOverflow(boolean allow) { + this.numericOverflow = allow; + } +} diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java new file mode 100644 index 00000000000..2a1eb13408c --- /dev/null +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java @@ -0,0 +1,1250 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plan.a; + +import java.lang.invoke.MethodHandle; +import java.lang.reflect.Array; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.plan.a.Definition.*; + +public class Def { + public static Object methodCall(final Object owner, final String name, final Definition definition, + final Object[] arguments, final boolean[] typesafe) { + final Method method = getMethod(owner, name, definition); + + if (method == null) { + throw new IllegalArgumentException("Unable to find dynamic method [" + name + "] " + + "for class [" + owner.getClass().getCanonicalName() + "]."); + } + + final MethodHandle handle = method.handle; + final List types = method.arguments; + final Object[] parameters = new Object[arguments.length + 1]; + + parameters[0] = owner; + + if (types.size() != arguments.length) { + throw new IllegalArgumentException("When dynamically calling [" + name + "] from class " + + "[" + owner.getClass() + "] expected [" + types.size() + "] arguments," + + " but found [" + arguments.length + "]."); + } + + try { + for (int count = 0; count < arguments.length; ++count) { + if (typesafe[count]) { + parameters[count + 1] = arguments[count]; + } else { + final Transform transform = getTransform(arguments[count].getClass(), types.get(count).clazz, definition); + parameters[count + 1] = transform == null ? 
arguments[count] : transform.method.handle.invoke(arguments[count]); + } + } + + return handle.invokeWithArguments(parameters); + } catch (Throwable throwable) { + throw new IllegalArgumentException("Error invoking method [" + name + "] " + + "with owner class [" + owner.getClass().getCanonicalName() + "].", throwable); + } + } + + @SuppressWarnings("unchecked") + public static void fieldStore(final Object owner, Object value, final String name, + final Definition definition, final boolean typesafe) { + final Field field = getField(owner, name, definition); + MethodHandle handle = null; + + if (field == null) { + final String set = "set" + Character.toUpperCase(name.charAt(0)) + name.substring(1); + final Method method = getMethod(owner, set, definition); + + if (method != null) { + handle = method.handle; + } + } else { + handle = field.setter; + } + + if (handle != null) { + try { + if (!typesafe) { + final Transform transform = getTransform(value.getClass(), handle.type().parameterType(1), definition); + + if (transform != null) { + value = transform.method.handle.invoke(value); + } + } + + handle.invoke(owner, value); + } catch (Throwable throwable) { + throw new IllegalArgumentException("Error storing value [" + value + "] " + + "in field [" + name + "] with owner class [" + owner.getClass() + "].", throwable); + } + } else if (owner instanceof Map) { + ((Map)owner).put(name, value); + } else if (owner instanceof List) { + try { + final int index = Integer.parseInt(name); + ((List)owner).add(index, value); + } catch (NumberFormatException exception) { + throw new IllegalArgumentException( "Illegal list shortcut value [" + name + "]."); + } + } else { + throw new IllegalArgumentException("Unable to find dynamic field [" + name + "] " + + "for class [" + owner.getClass().getCanonicalName() + "]."); + } + } + + @SuppressWarnings("unchecked") + public static Object fieldLoad(final Object owner, final String name, final Definition definition) { + if 
(owner.getClass().isArray() && "length".equals(name)) { + return Array.getLength(owner); + } else { + final Field field = getField(owner, name, definition); + MethodHandle handle; + + if (field == null) { + final String get = "get" + Character.toUpperCase(name.charAt(0)) + name.substring(1); + final Method method = getMethod(owner, get, definition); + + if (method != null) { + handle = method.handle; + } else if (owner instanceof Map) { + return ((Map)owner).get(name); + } else if (owner instanceof List) { + try { + final int index = Integer.parseInt(name); + + return ((List)owner).get(index); + } catch (NumberFormatException exception) { + throw new IllegalArgumentException( "Illegal list shortcut value [" + name + "]."); + } + } else { + throw new IllegalArgumentException("Unable to find dynamic field [" + name + "] " + + "for class [" + owner.getClass().getCanonicalName() + "]."); + } + } else { + handle = field.getter; + } + + if (handle == null) { + throw new IllegalArgumentException( + "Unable to read from field [" + name + "] with owner class [" + owner.getClass() + "]."); + } else { + try { + return handle.invoke(owner); + } catch (final Throwable throwable) { + throw new IllegalArgumentException("Error loading value from " + + "field [" + name + "] with owner class [" + owner.getClass() + "].", throwable); + } + } + } + } + + @SuppressWarnings("unchecked") + public static void arrayStore(final Object array, Object index, Object value, final Definition definition, + final boolean indexsafe, final boolean valuesafe) { + if (array instanceof Map) { + ((Map)array).put(index, value); + } else { + try { + if (!indexsafe) { + final Transform transform = getTransform(index.getClass(), Integer.class, definition); + + if (transform != null) { + index = transform.method.handle.invoke(index); + } + } + } catch (final Throwable throwable) { + throw new IllegalArgumentException( + "Error storing value [" + value + "] in list using index [" + index + "].", throwable); + 
} + + if (array.getClass().isArray()) { + try { + if (!valuesafe) { + final Transform transform = getTransform(value.getClass(), array.getClass().getComponentType(), definition); + + if (transform != null) { + value = transform.method.handle.invoke(value); + } + } + + Array.set(array, (int)index, value); + } catch (final Throwable throwable) { + throw new IllegalArgumentException("Error storing value [" + value + "] " + + "in array class [" + array.getClass().getCanonicalName() + "].", throwable); + } + } else if (array instanceof List) { + ((List)array).add((int)index, value); + } else { + throw new IllegalArgumentException("Attempting to address a non-array type " + + "[" + array.getClass().getCanonicalName() + "] as an array."); + } + } + } + + @SuppressWarnings("unchecked") + public static Object arrayLoad(final Object array, Object index, + final Definition definition, final boolean indexsafe) { + if (array instanceof Map) { + return ((Map)array).get(index); + } else { + try { + if (!indexsafe) { + final Transform transform = getTransform(index.getClass(), Integer.class, definition); + + if (transform != null) { + index = transform.method.handle.invoke(index); + } + } + } catch (final Throwable throwable) { + throw new IllegalArgumentException( + "Error loading value using index [" + index + "].", throwable); + } + + if (array.getClass().isArray()) { + try { + return Array.get(array, (int)index); + } catch (final Throwable throwable) { + throw new IllegalArgumentException("Error loading value from " + + "array class [" + array.getClass().getCanonicalName() + "].", throwable); + } + } else if (array instanceof List) { + return ((List)array).get((int)index); + } else { + throw new IllegalArgumentException("Attempting to address a non-array type " + + "[" + array.getClass().getCanonicalName() + "] as an array."); + } + } + } + + public static Method getMethod(final Object owner, final String name, final Definition definition) { + Struct struct = null; + Class 
clazz = owner.getClass(); + Method method = null; + + while (clazz != null) { + struct = definition.classes.get(clazz); + + if (struct != null) { + method = struct.methods.get(name); + + if (method != null) { + break; + } + } + + for (final Class iface : clazz.getInterfaces()) { + struct = definition.classes.get(iface); + + if (struct != null) { + method = struct.methods.get(name); + + if (method != null) { + break; + } + } + } + + if (struct != null) { + method = struct.methods.get(name); + + if (method != null) { + break; + } + } + + clazz = clazz.getSuperclass(); + } + + if (struct == null) { + throw new IllegalArgumentException("Unable to find a dynamic struct for class [" + owner.getClass() + "]."); + } + + return method; + } + + public static Field getField(final Object owner, final String name, final Definition definition) { + Struct struct = null; + Class clazz = owner.getClass(); + Field field = null; + + while (clazz != null) { + struct = definition.classes.get(clazz); + + if (struct != null) { + field = struct.members.get(name); + + if (field != null) { + break; + } + } + + for (final Class iface : clazz.getInterfaces()) { + struct = definition.classes.get(iface); + + if (struct != null) { + field = struct.members.get(name); + + if (field != null) { + break; + } + } + } + + if (struct != null) { + field = struct.members.get(name); + + if (field != null) { + break; + } + } + + clazz = clazz.getSuperclass(); + } + + if (struct == null) { + throw new IllegalArgumentException("Unable to find a dynamic struct for class [" + owner.getClass() + "]."); + } + + return field; + } + + public static Transform getTransform(Class fromClass, Class toClass, final Definition definition) { + Struct fromStruct = null; + Struct toStruct = null; + + if (fromClass.equals(toClass)) { + return null; + } + + while (fromClass != null) { + fromStruct = definition.classes.get(fromClass); + + if (fromStruct != null) { + break; + } + + for (final Class iface : 
fromClass.getInterfaces()) { + fromStruct = definition.classes.get(iface); + + if (fromStruct != null) { + break; + } + } + + if (fromStruct != null) { + break; + } + + fromClass = fromClass.getSuperclass(); + } + + if (fromStruct != null) { + while (toClass != null) { + toStruct = definition.classes.get(toClass); + + if (toStruct != null) { + break; + } + + for (final Class iface : toClass.getInterfaces()) { + toStruct = definition.classes.get(iface); + + if (toStruct != null) { + break; + } + } + + if (toStruct != null) { + break; + } + + toClass = toClass.getSuperclass(); + } + } + + if (toStruct != null) { + final Type fromType = definition.getType(fromStruct.name); + final Type toType = definition.getType(toStruct.name); + final Cast cast = new Cast(fromType, toType); + + return definition.transforms.get(cast); + } + + return null; + } + + public static Object not(final Object unary) { + if (unary instanceof Double || unary instanceof Float || unary instanceof Long) { + return ~((Number)unary).longValue(); + } else if (unary instanceof Number) { + return ~((Number)unary).intValue(); + } else if (unary instanceof Character) { + return ~(int)(char)unary; + } + + throw new ClassCastException("Cannot apply [~] operation to type " + + "[" + unary.getClass().getCanonicalName() + "]."); + } + + public static Object neg(final Object unary) { + if (unary instanceof Double) { + return -(double)unary; + } else if (unary instanceof Float) { + return -(float)unary; + } else if (unary instanceof Long) { + return -(long)unary; + } else if (unary instanceof Number) { + return -((Number)unary).intValue(); + } else if (unary instanceof Character) { + return -(char)unary; + } + + throw new ClassCastException("Cannot apply [-] operation to type " + + "[" + unary.getClass().getCanonicalName() + "]."); + } + + public static Object mul(final Object left, final Object right) { + if (left instanceof Number) { + if (right instanceof Number) { + if (left instanceof Double || right 
instanceof Double) { + return ((Number)left).doubleValue() * ((Number)right).doubleValue(); + } else if (left instanceof Float || right instanceof Float) { + return ((Number)left).floatValue() * ((Number)right).floatValue(); + } else if (left instanceof Long || right instanceof Long) { + return ((Number)left).longValue() * ((Number)right).longValue(); + } else { + return ((Number)left).intValue() * ((Number)right).intValue(); + } + } else if (right instanceof Character) { + if (left instanceof Double) { + return ((Number)left).doubleValue() * (double)(char)right; + } else if (left instanceof Float) { + return ((Number)left).floatValue() * (float)(char)right; + } else if (left instanceof Long) { + return ((Number)left).longValue() * (long)(char)right; + } else { + return ((Number)left).intValue() * (int)(char)right; + } + } + } else if (left instanceof Character) { + if (right instanceof Number) { + if (right instanceof Double) { + return (double)(char)left * ((Number)right).doubleValue(); + } else if (right instanceof Float) { + return (float)(char)left * ((Number)right).floatValue(); + } else if (right instanceof Long) { + return (long)(char)left * ((Number)right).longValue(); + } else { + return (int)(char)left * ((Number)right).intValue(); + } + } else if (right instanceof Character) { + return (int)(char)left * (int)(char)right; + } + } + + throw new ClassCastException("Cannot apply [*] operation to types " + + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + } + + public static Object div(final Object left, final Object right) { + if (left instanceof Number) { + if (right instanceof Number) { + if (left instanceof Double || right instanceof Double) { + return ((Number)left).doubleValue() / ((Number)right).doubleValue(); + } else if (left instanceof Float || right instanceof Float) { + return ((Number)left).floatValue() / ((Number)right).floatValue(); + } else if (left instanceof Long || right instanceof 
Long) { + return ((Number)left).longValue() / ((Number)right).longValue(); + } else { + return ((Number)left).intValue() / ((Number)right).intValue(); + } + } else if (right instanceof Character) { + if (left instanceof Double) { + return ((Number)left).doubleValue() / (double)(char)right; + } else if (left instanceof Float) { + return ((Number)left).floatValue() / (float)(char)right; + } else if (left instanceof Long) { + return ((Number)left).longValue() / (long)(char)right; + } else { + return ((Number)left).intValue() / (int)(char)right; + } + } + } else if (left instanceof Character) { + if (right instanceof Number) { + if (right instanceof Double) { + return (double)(char)left / ((Number)right).doubleValue(); + } else if (right instanceof Float) { + return (float)(char)left / ((Number)right).floatValue(); + } else if (right instanceof Long) { + return (long)(char)left / ((Number)right).longValue(); + } else { + return (int)(char)left / ((Number)right).intValue(); + } + } else if (right instanceof Character) { + return (int)(char)left / (int)(char)right; + } + } + + throw new ClassCastException("Cannot apply [/] operation to types " + + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + } + + public static Object rem(final Object left, final Object right) { + if (left instanceof Number) { + if (right instanceof Number) { + if (left instanceof Double || right instanceof Double) { + return ((Number)left).doubleValue() % ((Number)right).doubleValue(); + } else if (left instanceof Float || right instanceof Float) { + return ((Number)left).floatValue() % ((Number)right).floatValue(); + } else if (left instanceof Long || right instanceof Long) { + return ((Number)left).longValue() % ((Number)right).longValue(); + } else { + return ((Number)left).intValue() % ((Number)right).intValue(); + } + } else if (right instanceof Character) { + if (left instanceof Double) { + return ((Number)left).doubleValue() % 
(double)(char)right; + } else if (left instanceof Float) { + return ((Number)left).floatValue() % (float)(char)right; + } else if (left instanceof Long) { + return ((Number)left).longValue() % (long)(char)right; + } else { + return ((Number)left).intValue() % (int)(char)right; + } + } + } else if (left instanceof Character) { + if (right instanceof Number) { + if (right instanceof Double) { + return (double)(char)left % ((Number)right).doubleValue(); + } else if (right instanceof Float) { + return (float)(char)left % ((Number)right).floatValue(); + } else if (right instanceof Long) { + return (long)(char)left % ((Number)right).longValue(); + } else { + return (int)(char)left % ((Number)right).intValue(); + } + } else if (right instanceof Character) { + return (int)(char)left % (int)(char)right; + } + } + + throw new ClassCastException("Cannot apply [%] operation to types " + + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + } + + public static Object add(final Object left, final Object right) { + if (left instanceof String || right instanceof String) { + return "" + left + right; + } else if (left instanceof Number) { + if (right instanceof Number) { + if (left instanceof Double || right instanceof Double) { + return ((Number)left).doubleValue() + ((Number)right).doubleValue(); + } else if (left instanceof Float || right instanceof Float) { + return ((Number)left).floatValue() + ((Number)right).floatValue(); + } else if (left instanceof Long || right instanceof Long) { + return ((Number)left).longValue() + ((Number)right).longValue(); + } else { + return ((Number)left).intValue() + ((Number)right).intValue(); + } + } else if (right instanceof Character) { + if (left instanceof Double) { + return ((Number)left).doubleValue() + (double)(char)right; + } else if (left instanceof Float) { + return ((Number)left).floatValue() + (float)(char)right; + } else if (left instanceof Long) { + return 
((Number)left).longValue() + (long)(char)right; + } else { + return ((Number)left).intValue() + (int)(char)right; + } + } + } else if (left instanceof Character) { + if (right instanceof Number) { + if (right instanceof Double) { + return (double)(char)left + ((Number)right).doubleValue(); + } else if (right instanceof Float) { + return (float)(char)left + ((Number)right).floatValue(); + } else if (right instanceof Long) { + return (long)(char)left + ((Number)right).longValue(); + } else { + return (int)(char)left + ((Number)right).intValue(); + } + } else if (right instanceof Character) { + return (int)(char)left + (int)(char)right; + } + } + + throw new ClassCastException("Cannot apply [+] operation to types " + + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + } + + public static Object sub(final Object left, final Object right) { + if (left instanceof Number) { + if (right instanceof Number) { + if (left instanceof Double || right instanceof Double) { + return ((Number)left).doubleValue() - ((Number)right).doubleValue(); + } else if (left instanceof Float || right instanceof Float) { + return ((Number)left).floatValue() - ((Number)right).floatValue(); + } else if (left instanceof Long || right instanceof Long) { + return ((Number)left).longValue() - ((Number)right).longValue(); + } else { + return ((Number)left).intValue() - ((Number)right).intValue(); + } + } else if (right instanceof Character) { + if (left instanceof Double) { + return ((Number)left).doubleValue() - (double)(char)right; + } else if (left instanceof Float) { + return ((Number)left).floatValue() - (float)(char)right; + } else if (left instanceof Long) { + return ((Number)left).longValue() - (long)(char)right; + } else { + return ((Number)left).intValue() - (int)(char)right; + } + } + } else if (left instanceof Character) { + if (right instanceof Number) { + if (right instanceof Double) { + return (double)(char)left - 
((Number)right).doubleValue(); + } else if (right instanceof Float) { + return (float)(char)left - ((Number)right).floatValue(); + } else if (right instanceof Long) { + return (long)(char)left - ((Number)right).longValue(); + } else { + return (int)(char)left - ((Number)right).intValue(); + } + } else if (right instanceof Character) { + return (int)(char)left - (int)(char)right; + } + } + + throw new ClassCastException("Cannot apply [-] operation to types " + + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + } + + public static Object lsh(final Object left, final Object right) { + if (left instanceof Number) { + if (right instanceof Number) { + if (left instanceof Double || right instanceof Double || + left instanceof Float || right instanceof Float || + left instanceof Long || right instanceof Long) { + return ((Number)left).longValue() << ((Number)right).longValue(); + } else { + return ((Number)left).intValue() << ((Number)right).intValue(); + } + } else if (right instanceof Character) { + if (left instanceof Double || left instanceof Float || left instanceof Long) { + return ((Number)left).longValue() << (long)(char)right; + } else { + return ((Number)left).intValue() << (int)(char)right; + } + } + } else if (left instanceof Character) { + if (right instanceof Number) { + if (right instanceof Double || right instanceof Float || right instanceof Long) { + return (long)(char)left << ((Number)right).longValue(); + } else { + return (int)(char)left << ((Number)right).intValue(); + } + } else if (right instanceof Character) { + return (int)(char)left << (int)(char)right; + } + } + + throw new ClassCastException("Cannot apply [<<] operation to types " + + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + } + + public static Object rsh(final Object left, final Object right) { + if (left instanceof Number) { + if (right instanceof Number) { + if (left instanceof 
Double || right instanceof Double || + left instanceof Float || right instanceof Float || + left instanceof Long || right instanceof Long) { + return ((Number)left).longValue() >> ((Number)right).longValue(); + } else { + return ((Number)left).intValue() >> ((Number)right).intValue(); + } + } else if (right instanceof Character) { + if (left instanceof Double || left instanceof Float || left instanceof Long) { + return ((Number)left).longValue() >> (long)(char)right; + } else { + return ((Number)left).intValue() >> (int)(char)right; + } + } + } else if (left instanceof Character) { + if (right instanceof Number) { + if (right instanceof Double || right instanceof Float || right instanceof Long) { + return (long)(char)left >> ((Number)right).longValue(); + } else { + return (int)(char)left >> ((Number)right).intValue(); + } + } else if (right instanceof Character) { + return (int)(char)left >> (int)(char)right; + } + } + + throw new ClassCastException("Cannot apply [>>] operation to types " + + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + } + + public static Object ush(final Object left, final Object right) { + if (left instanceof Number) { + if (right instanceof Number) { + if (left instanceof Double || right instanceof Double || + left instanceof Float || right instanceof Float || + left instanceof Long || right instanceof Long) { + return ((Number)left).longValue() >>> ((Number)right).longValue(); + } else { + return ((Number)left).intValue() >>> ((Number)right).intValue(); + } + } else if (right instanceof Character) { + if (left instanceof Double || left instanceof Float || left instanceof Long) { + return ((Number)left).longValue() >>> (long)(char)right; + } else { + return ((Number)left).intValue() >>> (int)(char)right; + } + } + } else if (left instanceof Character) { + if (right instanceof Number) { + if (right instanceof Double || right instanceof Float || right instanceof Long) { + return 
(long)(char)left >>> ((Number)right).longValue(); + } else { + return (int)(char)left >>> ((Number)right).intValue(); + } + } else if (right instanceof Character) { + return (int)(char)left >>> (int)(char)right; + } + } + + throw new ClassCastException("Cannot apply [>>>] operation to types " + + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + } + + public static Object and(final Object left, final Object right) { + if (left instanceof Boolean && right instanceof Boolean) { + return (boolean)left && (boolean)right; + } else if (left instanceof Number) { + if (right instanceof Number) { + if (left instanceof Double || right instanceof Double || + left instanceof Float || right instanceof Float || + left instanceof Long || right instanceof Long) { + return ((Number)left).longValue() & ((Number)right).longValue(); + } else { + return ((Number)left).intValue() & ((Number)right).intValue(); + } + } else if (right instanceof Character) { + if (left instanceof Double || left instanceof Float || left instanceof Long) { + return ((Number)left).longValue() & (long)(char)right; + } else { + return ((Number)left).intValue() & (int)(char)right; + } + } + } else if (left instanceof Character) { + if (right instanceof Number) { + if (right instanceof Double || right instanceof Float || right instanceof Long) { + return (long)(char)left & ((Number)right).longValue(); + } else { + return (int)(char)left & ((Number)right).intValue(); + } + } else if (right instanceof Character) { + return (int)(char)left & (int)(char)right; + } + } + + throw new ClassCastException("Cannot apply [&] operation to types " + + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + } + + public static Object xor(final Object left, final Object right) { + if (left instanceof Boolean && right instanceof Boolean) { + return (boolean)left ^ (boolean)right; + } else if (left instanceof Number) { + if (right 
instanceof Number) { + if (left instanceof Double || right instanceof Double || + left instanceof Float || right instanceof Float || + left instanceof Long || right instanceof Long) { + return ((Number)left).longValue() ^ ((Number)right).longValue(); + } else { + return ((Number)left).intValue() ^ ((Number)right).intValue(); + } + } else if (right instanceof Character) { + if (left instanceof Double || left instanceof Float || left instanceof Long) { + return ((Number)left).longValue() ^ (long)(char)right; + } else { + return ((Number)left).intValue() ^ (int)(char)right; + } + } + } else if (left instanceof Character) { + if (right instanceof Number) { + if (right instanceof Double || right instanceof Float || right instanceof Long) { + return (long)(char)left ^ ((Number)right).longValue(); + } else { + return (int)(char)left ^ ((Number)right).intValue(); + } + } else if (right instanceof Character) { + return (int)(char)left ^ (int)(char)right; + } + } + + throw new ClassCastException("Cannot apply [^] operation to types " + + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + } + + public static Object or(final Object left, final Object right) { + if (left instanceof Boolean && right instanceof Boolean) { + return (boolean)left || (boolean)right; + } else if (left instanceof Number) { + if (right instanceof Number) { + if (left instanceof Double || right instanceof Double || + left instanceof Float || right instanceof Float || + left instanceof Long || right instanceof Long) { + return ((Number)left).longValue() | ((Number)right).longValue(); + } else { + return ((Number)left).intValue() | ((Number)right).intValue(); + } + } else if (right instanceof Character) { + if (left instanceof Double || left instanceof Float || left instanceof Long) { + return ((Number)left).longValue() | (long)(char)right; + } else { + return ((Number)left).intValue() | (int)(char)right; + } + } + } else if (left instanceof Character) { 
+ if (right instanceof Number) { + if (right instanceof Double || right instanceof Float || right instanceof Long) { + return (long)(char)left | ((Number)right).longValue(); + } else { + return (int)(char)left | ((Number)right).intValue(); + } + } else if (right instanceof Character) { + return (int)(char)left | (int)(char)right; + } + } + + throw new ClassCastException("Cannot apply [|] operation to types " + + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + } + + public static boolean eq(final Object left, final Object right) { + if (left != null && right != null) { + if (left instanceof Double) { + if (right instanceof Number) { + return (double)left == ((Number)right).doubleValue(); + } else if (right instanceof Character) { + return (double)left == (double)(char)right; + } + } else if (right instanceof Double) { + if (left instanceof Number) { + return ((Number)left).doubleValue() == (double)right; + } else if (left instanceof Character) { + return (double)(char)left == ((Number)right).doubleValue(); + } + } else if (left instanceof Float) { + if (right instanceof Number) { + return (float)left == ((Number)right).floatValue(); + } else if (right instanceof Character) { + return (float)left == (float)(char)right; + } + } else if (right instanceof Float) { + if (left instanceof Number) { + return ((Number)left).floatValue() == (float)right; + } else if (left instanceof Character) { + return (float)(char)left == ((Number)right).floatValue(); + } + } else if (left instanceof Long) { + if (right instanceof Number) { + return (long)left == ((Number)right).longValue(); + } else if (right instanceof Character) { + return (long)left == (long)(char)right; + } + } else if (right instanceof Long) { + if (left instanceof Number) { + return ((Number)left).longValue() == (long)right; + } else if (left instanceof Character) { + return (long)(char)left == ((Number)right).longValue(); + } + } else if (left instanceof Number) 
{ + if (right instanceof Number) { + return ((Number)left).intValue() == ((Number)right).intValue(); + } else if (right instanceof Character) { + return ((Number)left).intValue() == (int)(char)right; + } + } else if (right instanceof Number && left instanceof Character) { + return (int)(char)left == ((Number)right).intValue(); + } else if (left instanceof Character && right instanceof Character) { + return (int)(char)left == (int)(char)right; + } + + return left.equals(right); + } + + return left == null && right == null; + } + + public static boolean lt(final Object left, final Object right) { + if (left instanceof Number) { + if (right instanceof Number) { + if (left instanceof Double || right instanceof Double) { + return ((Number)left).doubleValue() < ((Number)right).doubleValue(); + } else if (left instanceof Float || right instanceof Float) { + return ((Number)left).floatValue() < ((Number)right).floatValue(); + } else if (left instanceof Long || right instanceof Long) { + return ((Number)left).longValue() < ((Number)right).longValue(); + } else { + return ((Number)left).intValue() < ((Number)right).intValue(); + } + } else if (right instanceof Character) { + if (left instanceof Double) { + return ((Number)left).doubleValue() < (double)(char)right; + } else if (left instanceof Float) { + return ((Number)left).floatValue() < (float)(char)right; + } else if (left instanceof Long) { + return ((Number)left).longValue() < (long)(char)right; + } else { + return ((Number)left).intValue() < (int)(char)right; + } + } + } else if (left instanceof Character) { + if (right instanceof Number) { + if (right instanceof Double) { + return (double)(char)left < ((Number)right).doubleValue(); + } else if (right instanceof Float) { + return (float)(char)left < ((Number)right).floatValue(); + } else if (right instanceof Long) { + return (long)(char)left < ((Number)right).longValue(); + } else { + return (int)(char)left < ((Number)right).intValue(); + } + } else if (right 
instanceof Character) { + return (int)(char)left < (int)(char)right; + } + } + + throw new ClassCastException("Cannot apply [<] operation to types " + + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + } + + public static boolean lte(final Object left, final Object right) { + if (left instanceof Number) { + if (right instanceof Number) { + if (left instanceof Double || right instanceof Double) { + return ((Number)left).doubleValue() <= ((Number)right).doubleValue(); + } else if (left instanceof Float || right instanceof Float) { + return ((Number)left).floatValue() <= ((Number)right).floatValue(); + } else if (left instanceof Long || right instanceof Long) { + return ((Number)left).longValue() <= ((Number)right).longValue(); + } else { + return ((Number)left).intValue() <= ((Number)right).intValue(); + } + } else if (right instanceof Character) { + if (left instanceof Double) { + return ((Number)left).doubleValue() <= (double)(char)right; + } else if (left instanceof Float) { + return ((Number)left).floatValue() <= (float)(char)right; + } else if (left instanceof Long) { + return ((Number)left).longValue() <= (long)(char)right; + } else { + return ((Number)left).intValue() <= (int)(char)right; + } + } + } else if (left instanceof Character) { + if (right instanceof Number) { + if (right instanceof Double) { + return (double)(char)left <= ((Number)right).doubleValue(); + } else if (right instanceof Float) { + return (float)(char)left <= ((Number)right).floatValue(); + } else if (right instanceof Long) { + return (long)(char)left <= ((Number)right).longValue(); + } else { + return (int)(char)left <= ((Number)right).intValue(); + } + } else if (right instanceof Character) { + return (int)(char)left <= (int)(char)right; + } + } + + throw new ClassCastException("Cannot apply [<=] operation to types " + + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + } + + public 
static boolean gt(final Object left, final Object right) { + if (left instanceof Number) { + if (right instanceof Number) { + if (left instanceof Double || right instanceof Double) { + return ((Number)left).doubleValue() > ((Number)right).doubleValue(); + } else if (left instanceof Float || right instanceof Float) { + return ((Number)left).floatValue() > ((Number)right).floatValue(); + } else if (left instanceof Long || right instanceof Long) { + return ((Number)left).longValue() > ((Number)right).longValue(); + } else { + return ((Number)left).intValue() > ((Number)right).intValue(); + } + } else if (right instanceof Character) { + if (left instanceof Double) { + return ((Number)left).doubleValue() > (double)(char)right; + } else if (left instanceof Float) { + return ((Number)left).floatValue() > (float)(char)right; + } else if (left instanceof Long) { + return ((Number)left).longValue() > (long)(char)right; + } else { + return ((Number)left).intValue() > (int)(char)right; + } + } + } else if (left instanceof Character) { + if (right instanceof Number) { + if (right instanceof Double) { + return (double)(char)left > ((Number)right).doubleValue(); + } else if (right instanceof Float) { + return (float)(char)left > ((Number)right).floatValue(); + } else if (right instanceof Long) { + return (long)(char)left > ((Number)right).longValue(); + } else { + return (int)(char)left > ((Number)right).intValue(); + } + } else if (right instanceof Character) { + return (int)(char)left > (int)(char)right; + } + } + + throw new ClassCastException("Cannot apply [>] operation to types " + + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + } + + public static boolean gte(final Object left, final Object right) { + if (left instanceof Number) { + if (right instanceof Number) { + if (left instanceof Double || right instanceof Double) { + return ((Number)left).doubleValue() >= ((Number)right).doubleValue(); + } else if (left 
instanceof Float || right instanceof Float) { + return ((Number)left).floatValue() >= ((Number)right).floatValue(); + } else if (left instanceof Long || right instanceof Long) { + return ((Number)left).longValue() >= ((Number)right).longValue(); + } else { + return ((Number)left).intValue() >= ((Number)right).intValue(); + } + } else if (right instanceof Character) { + if (left instanceof Double) { + return ((Number)left).doubleValue() >= (double)(char)right; + } else if (left instanceof Float) { + return ((Number)left).floatValue() >= (float)(char)right; + } else if (left instanceof Long) { + return ((Number)left).longValue() >= (long)(char)right; + } else { + return ((Number)left).intValue() >= (int)(char)right; + } + } + } else if (left instanceof Character) { + if (right instanceof Number) { + if (right instanceof Double) { + return (double)(char)left >= ((Number)right).doubleValue(); + } else if (right instanceof Float) { + return (float)(char)left >= ((Number)right).floatValue(); + } else if (right instanceof Long) { + return (long)(char)left >= ((Number)right).longValue(); + } else { + return (int)(char)left >= ((Number)right).intValue(); + } + } else if (right instanceof Character) { + return (int)(char)left >= (int)(char)right; + } + } + + throw new ClassCastException("Cannot apply [>=] operation to types " + + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); + } + + public static boolean DefToboolean(final Object value) { + if (value instanceof Boolean) { + return (boolean)value; + } else if (value instanceof Character) { + return ((char)value) != 0; + } else { + return ((Number)value).intValue() != 0; + } + } + + public static byte DefTobyte(final Object value) { + if (value instanceof Boolean) { + return ((Boolean)value) ?
(byte)1 : 0; + } else if (value instanceof Character) { + return (byte)(char)value; + } else { + return ((Number)value).byteValue(); + } + } + + public static short DefToshort(final Object value) { + if (value instanceof Boolean) { + return ((Boolean)value) ? (short)1 : 0; + } else if (value instanceof Character) { + return (short)(char)value; + } else { + return ((Number)value).shortValue(); + } + } + + public static char DefTochar(final Object value) { + if (value instanceof Boolean) { + return ((Boolean)value) ? (char)1 : 0; + } else if (value instanceof Character) { + return ((Character)value); + } else { + return (char)((Number)value).intValue(); + } + } + + public static int DefToint(final Object value) { + if (value instanceof Boolean) { + return ((Boolean)value) ? 1 : 0; + } else if (value instanceof Character) { + return (int)(char)value; + } else { + return ((Number)value).intValue(); + } + } + + public static long DefTolong(final Object value) { + if (value instanceof Boolean) { + return ((Boolean)value) ? 1L : 0; + } else if (value instanceof Character) { + return (long)(char)value; + } else { + return ((Number)value).longValue(); + } + } + + public static float DefTofloat(final Object value) { + if (value instanceof Boolean) { + return ((Boolean)value) ? (float)1 : 0; + } else if (value instanceof Character) { + return (float)(char)value; + } else { + return ((Number)value).floatValue(); + } + } + + public static double DefTodouble(final Object value) { + if (value instanceof Boolean) { + return ((Boolean)value) ? 
(double)1 : 0; + } else if (value instanceof Character) { + return (double)(char)value; + } else { + return ((Number)value).doubleValue(); + } + } + + public static Boolean DefToBoolean(final Object value) { + if (value == null) { + return null; + } else if (value instanceof Boolean) { + return (boolean)value; + } else if (value instanceof Character) { + return ((char)value) != 0; + } else { + return ((Number)value).intValue() != 0; + } + } + + public static Byte DefToByte(final Object value) { + if (value == null) { + return null; + } else if (value instanceof Boolean) { + return ((Boolean)value) ? (byte)1 : 0; + } else if (value instanceof Character) { + return (byte)(char)value; + } else { + return ((Number)value).byteValue(); + } + } + + public static Short DefToShort(final Object value) { + if (value == null) { + return null; + } else if (value instanceof Boolean) { + return ((Boolean)value) ? (short)1 : 0; + } else if (value instanceof Character) { + return (short)(char)value; + } else { + return ((Number)value).shortValue(); + } + } + + public static Character DefToCharacter(final Object value) { + if (value == null) { + return null; + } else if (value instanceof Boolean) { + return ((Boolean)value) ? (char)1 : 0; + } else if (value instanceof Character) { + return ((Character)value); + } else { + return (char)((Number)value).intValue(); + } + } + + public static Integer DefToInteger(final Object value) { + if (value == null) { + return null; + } else if (value instanceof Boolean) { + return ((Boolean)value) ? 1 : 0; + } else if (value instanceof Character) { + return (int)(char)value; + } else { + return ((Number)value).intValue(); + } + } + + public static Long DefToLong(final Object value) { + if (value == null) { + return null; + } else if (value instanceof Boolean) { + return ((Boolean)value) ? 
1L : 0; + } else if (value instanceof Character) { + return (long)(char)value; + } else { + return ((Number)value).longValue(); + } + } + + public static Float DefToFloat(final Object value) { + if (value == null) { + return null; + } else if (value instanceof Boolean) { + return ((Boolean)value) ? (float)1 : 0; + } else if (value instanceof Character) { + return (float)(char)value; + } else { + return ((Number)value).floatValue(); + } + } + + public static Double DefToDouble(final Object value) { + if (value == null) { + return null; + } else if (value instanceof Boolean) { + return ((Boolean)value) ? (double)1 : 0; + } else if (value instanceof Character) { + return (double)(char)value; + } else { + return ((Number)value).doubleValue(); + } + } +} diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Definition.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Definition.java new file mode 100644 index 00000000000..5c52a202919 --- /dev/null +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Definition.java @@ -0,0 +1,1809 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plan.a; + +import java.lang.invoke.MethodHandle; +import java.lang.invoke.MethodHandles; +import java.lang.invoke.MethodType; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +class Definition { + enum Sort { + VOID( void.class , 0 , true , false , false , false ), + BOOL( boolean.class , 1 , true , true , false , true ), + BYTE( byte.class , 1 , true , false , true , true ), + SHORT( short.class , 1 , true , false , true , true ), + CHAR( char.class , 1 , true , false , true , true ), + INT( int.class , 1 , true , false , true , true ), + LONG( long.class , 2 , true , false , true , true ), + FLOAT( float.class , 1 , true , false , true , true ), + DOUBLE( double.class , 2 , true , false , true , true ), + + VOID_OBJ( Void.class , 1 , true , false , false , false ), + BOOL_OBJ( Boolean.class , 1 , false , true , false , false ), + BYTE_OBJ( Byte.class , 1 , false , false , true , false ), + SHORT_OBJ( Short.class , 1 , false , false , true , false ), + CHAR_OBJ( Character.class , 1 , false , false , true , false ), + INT_OBJ( Integer.class , 1 , false , false , true , false ), + LONG_OBJ( Long.class , 1 , false , false , true , false ), + FLOAT_OBJ( Float.class , 1 , false , false , true , false ), + DOUBLE_OBJ( Double.class , 1 , false , false , true , false ), + + NUMBER( Number.class , 1 , false , false , true , false ), + STRING( String.class , 1 , false , false , false , true ), + + OBJECT( null , 1 , false , false , false , false ), + DEF( null , 1 , false , false , false , false ), + ARRAY( null , 1 , false , false , false , false ); + + final Class clazz; + final int size; + final boolean primitive; + final boolean bool; + final boolean numeric; + final boolean constant; + + Sort(final Class clazz, final int size, final boolean primitive, + final boolean bool, final boolean numeric, final boolean constant) { + 
this.clazz = clazz; + this.size = size; + this.bool = bool; + this.primitive = primitive; + this.numeric = numeric; + this.constant = constant; + } + } + + static class Type { + final String name; + final Struct struct; + final Class clazz; + final org.objectweb.asm.Type type; + final Sort sort; + + private Type(final String name, final Struct struct, final Class clazz, + final org.objectweb.asm.Type type, final Sort sort) { + this.name = name; + this.struct = struct; + this.clazz = clazz; + this.type = type; + this.sort = sort; + } + + @Override + public boolean equals(final Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + final Type type = (Type)object; + + return this.type.equals(type.type) && struct.equals(type.struct); + } + + @Override + public int hashCode() { + int result = struct.hashCode(); + result = 31 * result + type.hashCode(); + + return result; + } + } + + static class Constructor { + final String name; + final Struct owner; + final List arguments; + final org.objectweb.asm.commons.Method method; + final java.lang.reflect.Constructor reflect; + + private Constructor(final String name, final Struct owner, final List arguments, + final org.objectweb.asm.commons.Method method, final java.lang.reflect.Constructor reflect) { + this.name = name; + this.owner = owner; + this.arguments = Collections.unmodifiableList(arguments); + this.method = method; + this.reflect = reflect; + } + } + + static class Method { + final String name; + final Struct owner; + final Type rtn; + final List arguments; + final org.objectweb.asm.commons.Method method; + final java.lang.reflect.Method reflect; + final MethodHandle handle; + + private Method(final String name, final Struct owner, final Type rtn, final List arguments, + final org.objectweb.asm.commons.Method method, final java.lang.reflect.Method reflect, + final MethodHandle handle) { + this.name = name; + this.owner = 
owner; + this.rtn = rtn; + this.arguments = Collections.unmodifiableList(arguments); + this.method = method; + this.reflect = reflect; + this.handle = handle; + } + } + + static class Field { + final String name; + final Struct owner; + final Type generic; + final Type type; + final java.lang.reflect.Field reflect; + final MethodHandle getter; + final MethodHandle setter; + + private Field(final String name, final Struct owner, final Type generic, final Type type, + final java.lang.reflect.Field reflect, final MethodHandle getter, final MethodHandle setter) { + this.name = name; + this.owner = owner; + this.generic = generic; + this.type = type; + this.reflect = reflect; + this.getter = getter; + this.setter = setter; + } + } + + static class Struct { + final String name; + final Class clazz; + final org.objectweb.asm.Type type; + + final Map constructors; + final Map functions; + final Map methods; + + final Map statics; + final Map members; + + private Struct(final String name, final Class clazz, final org.objectweb.asm.Type type) { + this.name = name; + this.clazz = clazz; + this.type = type; + + constructors = new HashMap<>(); + functions = new HashMap<>(); + methods = new HashMap<>(); + + statics = new HashMap<>(); + members = new HashMap<>(); + } + + private Struct(final Struct struct) { + name = struct.name; + clazz = struct.clazz; + type = struct.type; + + constructors = Collections.unmodifiableMap(struct.constructors); + functions = Collections.unmodifiableMap(struct.functions); + methods = Collections.unmodifiableMap(struct.methods); + + statics = Collections.unmodifiableMap(struct.statics); + members = Collections.unmodifiableMap(struct.members); + } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + Struct struct = (Struct)object; + + return name.equals(struct.name); + } + + @Override + public int hashCode() { + return 
name.hashCode(); + } + } + + static class Pair { + final Type type0; + final Type type1; + + Pair(final Type type0, final Type type1) { + this.type0 = type0; + this.type1 = type1; + } + + @Override + public boolean equals(final Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + final Pair pair = (Pair)object; + + return type0.equals(pair.type0) && type1.equals(pair.type1); + } + + @Override + public int hashCode() { + int result = type0.hashCode(); + result = 31 * result + type1.hashCode(); + + return result; + } + } + + static class Cast { + final Type from; + final Type to; + + Cast(final Type from, final Type to) { + this.from = from; + this.to = to; + } + + @Override + public boolean equals(final Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + final Cast cast = (Cast)object; + + return from.equals(cast.from) && to.equals(cast.to); + } + + @Override + public int hashCode() { + int result = from.hashCode(); + result = 31 * result + to.hashCode(); + + return result; + } + } + + static class Transform extends Cast { + final Cast cast; + final Method method; + final Type upcast; + final Type downcast; + + private Transform(final Cast cast, Method method, final Type upcast, final Type downcast) { + super(cast.from, cast.to); + + this.cast = cast; + this.method = method; + this.upcast = upcast; + this.downcast = downcast; + } + } + + final Map structs; + final Map, Struct> classes; + final Map transforms; + final Map bounds; + + final Type voidType; + final Type booleanType; + final Type byteType; + final Type shortType; + final Type charType; + final Type intType; + final Type longType; + final Type floatType; + final Type doubleType; + + final Type voidobjType; + final Type booleanobjType; + final Type byteobjType; + final Type shortobjType; + final Type charobjType; + final 
Type intobjType; + final Type longobjType; + final Type floatobjType; + final Type doubleobjType; + + final Type objectType; + final Type defType; + final Type numberType; + final Type charseqType; + final Type stringType; + final Type mathType; + final Type utilityType; + final Type defobjType; + + final Type listType; + final Type arraylistType; + final Type mapType; + final Type hashmapType; + + final Type olistType; + final Type oarraylistType; + final Type omapType; + final Type ohashmapType; + + final Type smapType; + final Type shashmapType; + final Type somapType; + final Type sohashmapType; + + final Type execType; + + public Definition() { + structs = new HashMap<>(); + classes = new HashMap<>(); + transforms = new HashMap<>(); + bounds = new HashMap<>(); + + addDefaultStructs(); + addDefaultClasses(); + + voidType = getType("void"); + booleanType = getType("boolean"); + byteType = getType("byte"); + shortType = getType("short"); + charType = getType("char"); + intType = getType("int"); + longType = getType("long"); + floatType = getType("float"); + doubleType = getType("double"); + + voidobjType = getType("Void"); + booleanobjType = getType("Boolean"); + byteobjType = getType("Byte"); + shortobjType = getType("Short"); + charobjType = getType("Character"); + intobjType = getType("Integer"); + longobjType = getType("Long"); + floatobjType = getType("Float"); + doubleobjType = getType("Double"); + + objectType = getType("Object"); + defType = getType("def"); + numberType = getType("Number"); + charseqType = getType("CharSequence"); + stringType = getType("String"); + mathType = getType("Math"); + utilityType = getType("Utility"); + defobjType = getType("Def"); + + listType = getType("List"); + arraylistType = getType("ArrayList"); + mapType = getType("Map"); + hashmapType = getType("HashMap"); + + olistType = getType("List"); + oarraylistType = getType("ArrayList"); + omapType = getType("Map"); + ohashmapType = getType("HashMap"); + + smapType = 
getType("Map"); + shashmapType = getType("HashMap"); + somapType = getType("Map"); + sohashmapType = getType("HashMap"); + + execType = getType("Executable"); + + addDefaultElements(); + copyDefaultStructs(); + addDefaultTransforms(); + addDefaultBounds(); + } + + Definition(final Definition definition) { + final Map structs = new HashMap<>(); + + for (final Struct struct : definition.structs.values()) { + structs.put(struct.name, new Struct(struct)); + } + + this.structs = Collections.unmodifiableMap(structs); + + final Map, Struct> classes = new HashMap<>(); + + for (final Struct struct : definition.classes.values()) { + classes.put(struct.clazz, this.structs.get(struct.name)); + } + + this.classes = Collections.unmodifiableMap(classes); + + transforms = Collections.unmodifiableMap(definition.transforms); + bounds = Collections.unmodifiableMap(definition.bounds); + + voidType = definition.voidType; + booleanType = definition.booleanType; + byteType = definition.byteType; + shortType = definition.shortType; + charType = definition.charType; + intType = definition.intType; + longType = definition.longType; + floatType = definition.floatType; + doubleType = definition.doubleType; + + voidobjType = definition.voidobjType; + booleanobjType = definition.booleanobjType; + byteobjType = definition.byteobjType; + shortobjType = definition.shortobjType; + charobjType = definition.charobjType; + intobjType = definition.intobjType; + longobjType = definition.longobjType; + floatobjType = definition.floatobjType; + doubleobjType = definition.doubleobjType; + + objectType = definition.objectType; + defType = definition.defType; + numberType = definition.numberType; + charseqType = definition.charseqType; + stringType = definition.stringType; + mathType = definition.mathType; + utilityType = definition.utilityType; + defobjType = definition.defobjType; + + listType = definition.listType; + arraylistType = definition.arraylistType; + mapType = definition.mapType; + hashmapType = 
definition.hashmapType; + + olistType = definition.olistType; + oarraylistType = definition.oarraylistType; + omapType = definition.omapType; + ohashmapType = definition.ohashmapType; + + smapType = definition.smapType; + shashmapType = definition.shashmapType; + somapType = definition.somapType; + sohashmapType = definition.sohashmapType; + + execType = definition.execType; + } + + private void addDefaultStructs() { + addStruct( "void" , void.class ); + addStruct( "boolean" , boolean.class ); + addStruct( "byte" , byte.class ); + addStruct( "short" , short.class ); + addStruct( "char" , char.class ); + addStruct( "int" , int.class ); + addStruct( "long" , long.class ); + addStruct( "float" , float.class ); + addStruct( "double" , double.class ); + + addStruct( "Void" , Void.class ); + addStruct( "Boolean" , Boolean.class ); + addStruct( "Byte" , Byte.class ); + addStruct( "Short" , Short.class ); + addStruct( "Character" , Character.class ); + addStruct( "Integer" , Integer.class ); + addStruct( "Long" , Long.class ); + addStruct( "Float" , Float.class ); + addStruct( "Double" , Double.class ); + + addStruct( "Object" , Object.class ); + addStruct( "def" , Object.class ); + addStruct( "Number" , Number.class ); + addStruct( "CharSequence" , CharSequence.class ); + addStruct( "String" , String.class ); + addStruct( "Math" , Math.class ); + addStruct( "Utility" , Utility.class ); + addStruct( "Def" , Def.class ); + + addStruct( "List" , List.class ); + addStruct( "ArrayList" , ArrayList.class ); + addStruct( "Map" , Map.class ); + addStruct( "HashMap" , HashMap.class ); + + addStruct( "List" , List.class ); + addStruct( "ArrayList" , ArrayList.class ); + addStruct( "Map" , Map.class ); + addStruct( "HashMap" , HashMap.class ); + + addStruct( "Map" , Map.class ); + addStruct( "HashMap" , HashMap.class ); + addStruct( "Map" , Map.class ); + addStruct( "HashMap" , HashMap.class ); + + addStruct( "Executable" , Executable.class ); + } + + private void 
addDefaultClasses() { + addClass("boolean"); + addClass("byte"); + addClass("short"); + addClass("char"); + addClass("int"); + addClass("long"); + addClass("float"); + addClass("double"); + + addClass("Boolean"); + addClass("Byte"); + addClass("Short"); + addClass("Character"); + addClass("Integer"); + addClass("Long"); + addClass("Float"); + addClass("Double"); + + addClass("Object"); + addClass("Number"); + addClass("CharSequence"); + addClass("String"); + + addClass("List"); + addClass("ArrayList"); + addClass("Map"); + addClass("HashMap"); + } + + private void addDefaultElements() { + addMethod("Object", "toString", null, false, stringType, new Type[] {}, null, null); + addMethod("Object", "equals", null, false, booleanType, new Type[] {objectType}, null, null); + addMethod("Object", "hashCode", null, false, intType, new Type[] {}, null, null); + + addMethod("def", "toString", null, false, stringType, new Type[] {}, null, null); + addMethod("def", "equals", null, false, booleanType, new Type[] {objectType}, null, null); + addMethod("def", "hashCode", null, false, intType, new Type[] {}, null, null); + + addConstructor("Boolean", "new", new Type[] {booleanType}, null); + addMethod("Boolean", "valueOf", null, true, booleanobjType, new Type[] {booleanType}, null, null); + addMethod("Boolean", "booleanValue", null, false, booleanType, new Type[] {}, null, null); + + addConstructor("Byte", "new", new Type[]{byteType}, null); + addMethod("Byte", "valueOf", null, true, byteobjType, new Type[] {byteType}, null, null); + addMethod("Byte", "byteValue", null, false, byteType, new Type[] {}, null, null); + addField("Byte", "MIN_VALUE", null, true, byteType, null); + addField("Byte", "MAX_VALUE", null, true, byteType, null); + + addConstructor("Short", "new", new Type[]{shortType}, null); + addMethod("Short", "valueOf", null, true, shortobjType, new Type[] {shortType}, null, null); + addMethod("Short", "shortValue", null, false, shortType, new Type[] {}, null, null); + 
addField("Short", "MIN_VALUE", null, true, shortType, null); + addField("Short", "MAX_VALUE", null, true, shortType, null); + + addConstructor("Character", "new", new Type[]{charType}, null); + addMethod("Character", "valueOf", null, true, charobjType, new Type[] {charType}, null, null); + addMethod("Character", "charValue", null, false, charType, new Type[] {}, null, null); + addField("Character", "MIN_VALUE", null, true, charType, null); + addField("Character", "MAX_VALUE", null, true, charType, null); + + addConstructor("Integer", "new", new Type[]{intType}, null); + addMethod("Integer", "valueOf", null, true, intobjType, new Type[] {intType}, null, null); + addMethod("Integer", "intValue", null, false, intType, new Type[] {}, null, null); + addField("Integer", "MIN_VALUE", null, true, intType, null); + addField("Integer", "MAX_VALUE", null, true, intType, null); + + addConstructor("Long", "new", new Type[]{longType}, null); + addMethod("Long", "valueOf", null, true, longobjType, new Type[] {longType}, null, null); + addMethod("Long", "longValue", null, false, longType, new Type[] {}, null, null); + addField("Long", "MIN_VALUE", null, true, longType, null); + addField("Long", "MAX_VALUE", null, true, longType, null); + + addConstructor("Float", "new", new Type[]{floatType}, null); + addMethod("Float", "valueOf", null, true, floatobjType, new Type[] {floatType}, null, null); + addMethod("Float", "floatValue", null, false, floatType, new Type[] {}, null, null); + addField("Float", "MIN_VALUE", null, true, floatType, null); + addField("Float", "MAX_VALUE", null, true, floatType, null); + + addConstructor("Double", "new", new Type[]{doubleType}, null); + addMethod("Double", "valueOf", null, true, doubleobjType, new Type[] {doubleType}, null, null); + addMethod("Double", "doubleValue", null, false, doubleType, new Type[] {}, null, null); + addField("Double", "MIN_VALUE", null, true, doubleType, null); + addField("Double", "MAX_VALUE", null, true, doubleType, null); + 
+ addMethod("Number", "byteValue", null, false, byteType, new Type[] {}, null, null); + addMethod("Number", "shortValue", null, false, shortType, new Type[] {}, null, null); + addMethod("Number", "intValue", null, false, intType, new Type[] {}, null, null); + addMethod("Number", "longValue", null, false, longType, new Type[] {}, null, null); + addMethod("Number", "floatValue", null, false, floatType, new Type[] {}, null, null); + addMethod("Number", "doubleValue", null, false, doubleType, new Type[] {}, null, null); + + addMethod("CharSequence", "charAt", null, false, charType, new Type[] {intType}, null, null); + addMethod("CharSequence", "length", null, false, intType, new Type[] {}, null, null); + + addConstructor("String", "new", new Type[] {}, null); + addMethod("String", "codePointAt", null, false, intType, new Type[] {intType}, null, null); + addMethod("String", "compareTo", null, false, intType, new Type[] {stringType}, null, null); + addMethod("String", "concat", null, false, stringType, new Type[] {stringType}, null, null); + addMethod("String", "endsWith", null, false, booleanType, new Type[] {stringType}, null, null); + addMethod("String", "indexOf", null, false, intType, new Type[] {stringType, intType}, null, null); + addMethod("String", "isEmpty", null, false, booleanType, new Type[] {}, null, null); + addMethod("String", "replace", null, false, stringType, new Type[] {charseqType, charseqType}, null, null); + addMethod("String", "startsWith", null, false, booleanType, new Type[] {stringType}, null, null); + addMethod("String", "substring", null, false, stringType, new Type[] {intType, intType}, null, null); + addMethod("String", "toCharArray", null, false, getType(charType.struct, 1), new Type[] {}, null, null); + addMethod("String", "trim", null, false, stringType, new Type[] {}, null, null); + + addMethod("Utility", "NumberToboolean", null, true, booleanType, new Type[] {numberType}, null, null); + addMethod("Utility", "NumberTochar", null, true, 
charType, new Type[] {numberType}, null, null); + addMethod("Utility", "NumberToBoolean", null, true, booleanobjType, new Type[] {numberType}, null, null); + addMethod("Utility", "NumberToByte", null, true, byteobjType, new Type[] {numberType}, null, null); + addMethod("Utility", "NumberToShort", null, true, shortobjType, new Type[] {numberType}, null, null); + addMethod("Utility", "NumberToCharacter", null, true, charobjType, new Type[] {numberType}, null, null); + addMethod("Utility", "NumberToInteger", null, true, intobjType, new Type[] {numberType}, null, null); + addMethod("Utility", "NumberToLong", null, true, longobjType, new Type[] {numberType}, null, null); + addMethod("Utility", "NumberToFloat", null, true, floatobjType, new Type[] {numberType}, null, null); + addMethod("Utility", "NumberToDouble", null, true, doubleobjType, new Type[] {numberType}, null, null); + addMethod("Utility", "booleanTobyte", null, true, byteType, new Type[] {booleanType}, null, null); + addMethod("Utility", "booleanToshort", null, true, shortType, new Type[] {booleanType}, null, null); + addMethod("Utility", "booleanTochar", null, true, charType, new Type[] {booleanType}, null, null); + addMethod("Utility", "booleanToint", null, true, intType, new Type[] {booleanType}, null, null); + addMethod("Utility", "booleanTolong", null, true, longType, new Type[] {booleanType}, null, null); + addMethod("Utility", "booleanTofloat", null, true, floatType, new Type[] {booleanType}, null, null); + addMethod("Utility", "booleanTodouble", null, true, doubleType, new Type[] {booleanType}, null, null); + addMethod("Utility", "booleanToInteger", null, true, intobjType, new Type[] {booleanType}, null, null); + addMethod("Utility", "BooleanTobyte", null, true, byteType, new Type[] {booleanobjType}, null, null); + addMethod("Utility", "BooleanToshort", null, true, shortType, new Type[] {booleanobjType}, null, null); + addMethod("Utility", "BooleanTochar", null, true, charType, new Type[] 
{booleanobjType}, null, null); + addMethod("Utility", "BooleanToint", null, true, intType, new Type[] {booleanobjType}, null, null); + addMethod("Utility", "BooleanTolong", null, true, longType, new Type[] {booleanobjType}, null, null); + addMethod("Utility", "BooleanTofloat", null, true, floatType, new Type[] {booleanobjType}, null, null); + addMethod("Utility", "BooleanTodouble", null, true, doubleType, new Type[] {booleanobjType}, null, null); + addMethod("Utility", "BooleanToByte", null, true, byteobjType, new Type[] {booleanobjType}, null, null); + addMethod("Utility", "BooleanToShort", null, true, shortobjType, new Type[] {booleanobjType}, null, null); + addMethod("Utility", "BooleanToCharacter", null, true, charobjType, new Type[] {booleanobjType}, null, null); + addMethod("Utility", "BooleanToInteger", null, true, intobjType, new Type[] {booleanobjType}, null, null); + addMethod("Utility", "BooleanToLong", null, true, longobjType, new Type[] {booleanobjType}, null, null); + addMethod("Utility", "BooleanToFloat", null, true, floatobjType, new Type[] {booleanobjType}, null, null); + addMethod("Utility", "BooleanToDouble", null, true, doubleobjType, new Type[] {booleanobjType}, null, null); + addMethod("Utility", "byteToboolean", null, true, booleanType, new Type[] {byteType}, null, null); + addMethod("Utility", "byteToShort", null, true, shortobjType, new Type[] {byteType}, null, null); + addMethod("Utility", "byteToCharacter", null, true, charobjType, new Type[] {byteType}, null, null); + addMethod("Utility", "byteToInteger", null, true, intobjType, new Type[] {byteType}, null, null); + addMethod("Utility", "byteToLong", null, true, longobjType, new Type[] {byteType}, null, null); + addMethod("Utility", "byteToFloat", null, true, floatobjType, new Type[] {byteType}, null, null); + addMethod("Utility", "byteToDouble", null, true, doubleobjType, new Type[] {byteType}, null, null); + addMethod("Utility", "ByteToboolean", null, true, booleanType, new Type[] 
{byteobjType}, null, null); + addMethod("Utility", "ByteTochar", null, true, charType, new Type[] {byteobjType}, null, null); + addMethod("Utility", "shortToboolean", null, true, booleanType, new Type[] {shortType}, null, null); + addMethod("Utility", "shortToByte", null, true, byteobjType, new Type[] {shortType}, null, null); + addMethod("Utility", "shortToCharacter", null, true, charobjType, new Type[] {shortType}, null, null); + addMethod("Utility", "shortToInteger", null, true, intobjType, new Type[] {shortType}, null, null); + addMethod("Utility", "shortToLong", null, true, longobjType, new Type[] {shortType}, null, null); + addMethod("Utility", "shortToFloat", null, true, floatobjType, new Type[] {shortType}, null, null); + addMethod("Utility", "shortToDouble", null, true, doubleobjType, new Type[] {shortType}, null, null); + addMethod("Utility", "ShortToboolean", null, true, booleanType, new Type[] {shortobjType}, null, null); + addMethod("Utility", "ShortTochar", null, true, charType, new Type[] {shortobjType}, null, null); + addMethod("Utility", "charToboolean", null, true, booleanType, new Type[] {charType}, null, null); + addMethod("Utility", "charToByte", null, true, byteobjType, new Type[] {charType}, null, null); + addMethod("Utility", "charToShort", null, true, shortobjType, new Type[] {charType}, null, null); + addMethod("Utility", "charToInteger", null, true, intobjType, new Type[] {charType}, null, null); + addMethod("Utility", "charToLong", null, true, longobjType, new Type[] {charType}, null, null); + addMethod("Utility", "charToFloat", null, true, floatobjType, new Type[] {charType}, null, null); + addMethod("Utility", "charToDouble", null, true, doubleobjType, new Type[] {charType}, null, null); + addMethod("Utility", "CharacterToboolean", null, true, booleanType, new Type[] {charobjType}, null, null); + addMethod("Utility", "CharacterTobyte", null, true, byteType, new Type[] {charobjType}, null, null); + addMethod("Utility", 
"CharacterToshort", null, true, shortType, new Type[] {charobjType}, null, null); + addMethod("Utility", "CharacterToint", null, true, intType, new Type[] {charobjType}, null, null); + addMethod("Utility", "CharacterTolong", null, true, longType, new Type[] {charobjType}, null, null); + addMethod("Utility", "CharacterTofloat", null, true, floatType, new Type[] {charobjType}, null, null); + addMethod("Utility", "CharacterTodouble", null, true, doubleType, new Type[] {charobjType}, null, null); + addMethod("Utility", "CharacterToBoolean", null, true, booleanobjType, new Type[] {charobjType}, null, null); + addMethod("Utility", "CharacterToByte", null, true, byteobjType, new Type[] {charobjType}, null, null); + addMethod("Utility", "CharacterToShort", null, true, shortobjType, new Type[] {charobjType}, null, null); + addMethod("Utility", "CharacterToInteger", null, true, intobjType, new Type[] {charobjType}, null, null); + addMethod("Utility", "CharacterToLong", null, true, longobjType, new Type[] {charobjType}, null, null); + addMethod("Utility", "CharacterToFloat", null, true, floatobjType, new Type[] {charobjType}, null, null); + addMethod("Utility", "CharacterToDouble", null, true, doubleobjType, new Type[] {charobjType}, null, null); + addMethod("Utility", "intToboolean", null, true, booleanType, new Type[] {intType}, null, null); + addMethod("Utility", "intToByte", null, true, byteobjType, new Type[] {intType}, null, null); + addMethod("Utility", "intToShort", null, true, shortobjType, new Type[] {intType}, null, null); + addMethod("Utility", "intToCharacter", null, true, charobjType, new Type[] {intType}, null, null); + addMethod("Utility", "intToLong", null, true, longobjType, new Type[] {intType}, null, null); + addMethod("Utility", "intToFloat", null, true, floatobjType, new Type[] {intType}, null, null); + addMethod("Utility", "intToDouble", null, true, doubleobjType, new Type[] {intType}, null, null); + addMethod("Utility", "IntegerToboolean", null, true, 
booleanType, new Type[] {intobjType}, null, null); + addMethod("Utility", "IntegerTochar", null, true, charType, new Type[] {intobjType}, null, null); + addMethod("Utility", "longToboolean", null, true, booleanType, new Type[] {longType}, null, null); + addMethod("Utility", "longToByte", null, true, byteobjType, new Type[] {longType}, null, null); + addMethod("Utility", "longToShort", null, true, shortobjType, new Type[] {longType}, null, null); + addMethod("Utility", "longToCharacter", null, true, charobjType, new Type[] {longType}, null, null); + addMethod("Utility", "longToInteger", null, true, intobjType, new Type[] {longType}, null, null); + addMethod("Utility", "longToFloat", null, true, floatobjType, new Type[] {longType}, null, null); + addMethod("Utility", "longToDouble", null, true, doubleobjType, new Type[] {longType}, null, null); + addMethod("Utility", "LongToboolean", null, true, booleanType, new Type[] {longobjType}, null, null); + addMethod("Utility", "LongTochar", null, true, charType, new Type[] {longobjType}, null, null); + addMethod("Utility", "floatToboolean", null, true, booleanType, new Type[] {floatType}, null, null); + addMethod("Utility", "floatToByte", null, true, byteobjType, new Type[] {floatType}, null, null); + addMethod("Utility", "floatToShort", null, true, shortobjType, new Type[] {floatType}, null, null); + addMethod("Utility", "floatToCharacter", null, true, charobjType, new Type[] {floatType}, null, null); + addMethod("Utility", "floatToInteger", null, true, intobjType, new Type[] {floatType}, null, null); + addMethod("Utility", "floatToLong", null, true, longobjType, new Type[] {floatType}, null, null); + addMethod("Utility", "floatToDouble", null, true, doubleobjType, new Type[] {floatType}, null, null); + addMethod("Utility", "FloatToboolean", null, true, booleanType, new Type[] {floatobjType}, null, null); + addMethod("Utility", "FloatTochar", null, true, charType, new Type[] {floatobjType}, null, null); + 
addMethod("Utility", "doubleToboolean", null, true, booleanType, new Type[] {doubleType}, null, null); + addMethod("Utility", "doubleToByte", null, true, byteobjType, new Type[] {doubleType}, null, null); + addMethod("Utility", "doubleToShort", null, true, shortobjType, new Type[] {doubleType}, null, null); + addMethod("Utility", "doubleToCharacter", null, true, charobjType, new Type[] {doubleType}, null, null); + addMethod("Utility", "doubleToInteger", null, true, intobjType, new Type[] {doubleType}, null, null); + addMethod("Utility", "doubleToLong", null, true, longobjType, new Type[] {doubleType}, null, null); + addMethod("Utility", "doubleToFloat", null, true, floatobjType, new Type[] {doubleType}, null, null); + addMethod("Utility", "DoubleToboolean", null, true, booleanType, new Type[] {doubleobjType}, null, null); + addMethod("Utility", "DoubleTochar", null, true, charType, new Type[] {doubleobjType}, null, null); + + addMethod("Math", "dmax", "max", true, doubleType, new Type[] {doubleType, doubleType}, null, null); + + addMethod("Def", "DefToboolean", null, true, booleanType, new Type[] {defType}, null, null); + addMethod("Def", "DefTobyte", null, true, byteType, new Type[] {defType}, null, null); + addMethod("Def", "DefToshort", null, true, shortType, new Type[] {defType}, null, null); + addMethod("Def", "DefTochar", null, true, charType, new Type[] {defType}, null, null); + addMethod("Def", "DefToint", null, true, intType, new Type[] {defType}, null, null); + addMethod("Def", "DefTolong", null, true, longType, new Type[] {defType}, null, null); + addMethod("Def", "DefTofloat", null, true, floatType, new Type[] {defType}, null, null); + addMethod("Def", "DefTodouble", null, true, doubleType, new Type[] {defType}, null, null); + addMethod("Def", "DefToBoolean", null, true, booleanobjType, new Type[] {defType}, null, null); + addMethod("Def", "DefToByte", null, true, byteobjType, new Type[] {defType}, null, null); + addMethod("Def", "DefToShort", null, 
true, shortobjType, new Type[] {defType}, null, null); + addMethod("Def", "DefToCharacter", null, true, charobjType, new Type[] {defType}, null, null); + addMethod("Def", "DefToInteger", null, true, intobjType, new Type[] {defType}, null, null); + addMethod("Def", "DefToLong", null, true, longobjType, new Type[] {defType}, null, null); + addMethod("Def", "DefToFloat", null, true, floatobjType, new Type[] {defType}, null, null); + addMethod("Def", "DefToDouble", null, true, doubleobjType, new Type[] {defType}, null, null); + + addMethod("List", "addLast", "add", false, booleanType, new Type[] {objectType}, null, new Type[] {defType}); + addMethod("List", "add", null, false, voidType, new Type[] {intType, objectType}, null, new Type[] {intType, defType}); + addMethod("List", "get", null, false, objectType, new Type[] {intType}, defType, null); + addMethod("List", "remove", null, false, objectType, new Type[] {intType}, defType, null); + addMethod("List", "size", null, false, intType, new Type[] {}, null, null); + addMethod("List", "isEmpty", null, false, booleanType, new Type[] {}, null, null); + + addConstructor("ArrayList", "new", new Type[] {}, null); + + addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, defType, new Type[] {defType, defType}); + addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, defType, new Type[] {defType}); + addMethod("Map", "remove", null, false, objectType, new Type[] {objectType}, null, null); + addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); + addMethod("Map", "isEmpty", null, false, booleanType, new Type[] {}, null, null); + + addConstructor("HashMap", "new", new Type[] {}, null); + + addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, defType, new Type[] {stringType, defType}); + addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, defType, new Type[] {stringType}); + addMethod("Map", "remove", 
null, false, objectType, new Type[] {objectType}, defType, new Type[] {stringType}); + addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); + addMethod("Map", "isEmpty", null, false, booleanType, new Type[] {}, null, null); + + addConstructor("HashMap", "new", new Type[] {}, null); + + addMethod("List", "addLast", "add", false, booleanType, new Type[] {objectType}, null, null); + addMethod("List", "add", null, false, voidType, new Type[] {intType, objectType}, null, null); + addMethod("List", "get", null, false, objectType, new Type[] {intType}, null, null); + addMethod("List", "remove", null, false, objectType, new Type[] {intType}, null, null); + addMethod("List", "size", null, false, intType, new Type[] {}, null, null); + addMethod("List", "isEmpty", null, false, booleanType, new Type[] {}, null, null); + + addConstructor("ArrayList", "new", new Type[] {}, null); + + addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, null, null); + addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, null, null); + addMethod("Map", "remove", null, false, objectType, new Type[] {objectType}, null, null); + addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); + addMethod("Map", "isEmpty", null, false, booleanType, new Type[] {}, null, null); + + addConstructor("HashMap", "new", new Type[] {}, null); + + addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, null, new Type[] {stringType, objectType}); + addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, null, new Type[] {stringType}); + addMethod("Map", "remove", null, false, objectType, new Type[] {objectType}, null, new Type[] {stringType}); + addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); + addMethod("Map", "isEmpty", null, false, booleanType, new Type[] {}, null, null); + + addConstructor("HashMap", "new", new Type[] {}, null); + } + + private void 
/**
 * Copies inherited members into each default struct from its listed super-structs.
 * Call order matters: a struct's supers must already be fully populated when it is copied.
 *
 * NOTE(review): several copyStruct lines below appear duplicated verbatim (e.g. two
 * identical "Map"/"HashMap" groups). In the original source these were almost certainly
 * distinct generic struct names (e.g. "Map&lt;Object,Object&gt;", "HashMap&lt;String,def&gt;") whose
 * angle-bracketed parameters were stripped in transit — the struct-name regex in
 * addStruct below explicitly permits '<', '>' and ','. Verify against the original file
 * before assuming these are redundant.
 */
private void copyDefaultStructs() {
    // Boxed primitives and their numeric hierarchy.
    copyStruct("Void", "Object");
    copyStruct("Boolean", "Object");
    copyStruct("Byte", "Number", "Object");
    copyStruct("Short", "Number", "Object");
    copyStruct("Character", "Object");
    copyStruct("Integer", "Number", "Object");
    copyStruct("Long", "Number", "Object");
    copyStruct("Float", "Number", "Object");
    copyStruct("Double", "Number", "Object");

    copyStruct("Number", "Object");
    copyStruct("CharSequence", "Object");
    copyStruct("String", "CharSequence", "Object");

    // Collection structs (see NOTE above about stripped generic parameters).
    copyStruct("List", "Object");
    copyStruct("ArrayList", "List", "Object");
    copyStruct("Map", "Object");
    copyStruct("HashMap", "Map", "Object");
    copyStruct("Map", "Object");
    copyStruct("HashMap", "Map", "Object");

    copyStruct("List", "Object");
    copyStruct("ArrayList", "List", "Object");
    copyStruct("Map", "Object");
    copyStruct("HashMap", "Map", "Object");
    copyStruct("Map", "Object");
    copyStruct("HashMap", "Map", "Object");

    copyStruct("Executable", "Object");
}

/**
 * Registers the built-in type-conversion table: for each (from, to) pair, the owning
 * struct and method name that implements the cast, plus a boolean flag.
 * NOTE(review): the final boolean presumably marks the implementing method as a static
 * function (it is true exactly for the "Utility"/"Def"/valueOf-style static helpers and
 * false for instance methods like Number.byteValue) — confirm against addTransform's
 * declaration, which is not visible in this chunk.
 */
private void addDefaultTransforms() {
    // boolean -> *
    addTransform(booleanType, byteType, "Utility", "booleanTobyte", true);
    addTransform(booleanType, shortType, "Utility", "booleanToshort", true);
    addTransform(booleanType, charType, "Utility", "booleanTochar", true);
    addTransform(booleanType, intType, "Utility", "booleanToint", true);
    addTransform(booleanType, longType, "Utility", "booleanTolong", true);
    addTransform(booleanType, floatType, "Utility", "booleanTofloat", true);
    addTransform(booleanType, doubleType, "Utility", "booleanTodouble", true);
    addTransform(booleanType, objectType, "Boolean", "valueOf", true);
    addTransform(booleanType, defType, "Boolean", "valueOf", true);
    addTransform(booleanType, numberType, "Utility", "booleanToInteger", true);
    addTransform(booleanType, booleanobjType, "Boolean", "valueOf", true);

    // byte -> *
    addTransform(byteType, booleanType, "Utility", "byteToboolean", true);
    addTransform(byteType, objectType, "Byte", "valueOf", true);
    addTransform(byteType, defType, "Byte", "valueOf", true);
    addTransform(byteType, numberType, "Byte", "valueOf", true);
    addTransform(byteType, byteobjType, "Byte", "valueOf", true);
    addTransform(byteType, shortobjType, "Utility", "byteToShort", true);
    addTransform(byteType, charobjType, "Utility", "byteToCharacter", true);
    addTransform(byteType, intobjType, "Utility", "byteToInteger", true);
    addTransform(byteType, longobjType, "Utility", "byteToLong", true);
    addTransform(byteType, floatobjType, "Utility", "byteToFloat", true);
    addTransform(byteType, doubleobjType, "Utility", "byteToDouble", true);

    // short -> *
    addTransform(shortType, booleanType, "Utility", "shortToboolean", true);
    addTransform(shortType, objectType, "Short", "valueOf", true);
    addTransform(shortType, defType, "Short", "valueOf", true);
    addTransform(shortType, numberType, "Short", "valueOf", true);
    addTransform(shortType, byteobjType, "Utility", "shortToByte", true);
    addTransform(shortType, shortobjType, "Short", "valueOf", true);
    addTransform(shortType, charobjType, "Utility", "shortToCharacter", true);
    addTransform(shortType, intobjType, "Utility", "shortToInteger", true);
    addTransform(shortType, longobjType, "Utility", "shortToLong", true);
    addTransform(shortType, floatobjType, "Utility", "shortToFloat", true);
    addTransform(shortType, doubleobjType, "Utility", "shortToDouble", true);

    // char -> *  (char boxes to Integer for the Number view, via charToInteger)
    addTransform(charType, booleanType, "Utility", "charToboolean", true);
    addTransform(charType, objectType, "Character", "valueOf", true);
    addTransform(charType, defType, "Character", "valueOf", true);
    addTransform(charType, numberType, "Utility", "charToInteger", true);
    addTransform(charType, byteobjType, "Utility", "charToByte", true);
    addTransform(charType, shortobjType, "Utility", "charToShort", true);
    addTransform(charType, charobjType, "Character", "valueOf", true);
    addTransform(charType, intobjType, "Utility", "charToInteger", true);
    addTransform(charType, longobjType, "Utility", "charToLong", true);
    addTransform(charType, floatobjType, "Utility", "charToFloat", true);
    addTransform(charType, doubleobjType, "Utility", "charToDouble", true);

    // int -> *
    addTransform(intType, booleanType, "Utility", "intToboolean", true);
    addTransform(intType, objectType, "Integer", "valueOf", true);
    addTransform(intType, defType, "Integer", "valueOf", true);
    addTransform(intType, numberType, "Integer", "valueOf", true);
    addTransform(intType, byteobjType, "Utility", "intToByte", true);
    addTransform(intType, shortobjType, "Utility", "intToShort", true);
    addTransform(intType, charobjType, "Utility", "intToCharacter", true);
    addTransform(intType, intobjType, "Integer", "valueOf", true);
    addTransform(intType, longobjType, "Utility", "intToLong", true);
    addTransform(intType, floatobjType, "Utility", "intToFloat", true);
    addTransform(intType, doubleobjType, "Utility", "intToDouble", true);

    // long -> *
    addTransform(longType, booleanType, "Utility", "longToboolean", true);
    addTransform(longType, objectType, "Long", "valueOf", true);
    addTransform(longType, defType, "Long", "valueOf", true);
    addTransform(longType, numberType, "Long", "valueOf", true);
    addTransform(longType, byteobjType, "Utility", "longToByte", true);
    addTransform(longType, shortobjType, "Utility", "longToShort", true);
    addTransform(longType, charobjType, "Utility", "longToCharacter", true);
    addTransform(longType, intobjType, "Utility", "longToInteger", true);
    addTransform(longType, longobjType, "Long", "valueOf", true);
    addTransform(longType, floatobjType, "Utility", "longToFloat", true);
    addTransform(longType, doubleobjType, "Utility", "longToDouble", true);

    // float -> *
    addTransform(floatType, booleanType, "Utility", "floatToboolean", true);
    addTransform(floatType, objectType, "Float", "valueOf", true);
    addTransform(floatType, defType, "Float", "valueOf", true);
    addTransform(floatType, numberType, "Float", "valueOf", true);
    addTransform(floatType, byteobjType, "Utility", "floatToByte", true);
    addTransform(floatType, shortobjType, "Utility", "floatToShort", true);
    addTransform(floatType, charobjType, "Utility", "floatToCharacter", true);
    addTransform(floatType, intobjType, "Utility", "floatToInteger", true);
    addTransform(floatType, longobjType, "Utility", "floatToLong", true);
    addTransform(floatType, floatobjType, "Float", "valueOf", true);
    addTransform(floatType, doubleobjType, "Utility", "floatToDouble", true);

    // double -> *
    addTransform(doubleType, booleanType, "Utility", "doubleToboolean", true);
    addTransform(doubleType, objectType, "Double", "valueOf", true);
    addTransform(doubleType, defType, "Double", "valueOf", true);
    addTransform(doubleType, numberType, "Double", "valueOf", true);
    addTransform(doubleType, byteobjType, "Utility", "doubleToByte", true);
    addTransform(doubleType, shortobjType, "Utility", "doubleToShort", true);
    addTransform(doubleType, charobjType, "Utility", "doubleToCharacter", true);
    addTransform(doubleType, intobjType, "Utility", "doubleToInteger", true);
    addTransform(doubleType, longobjType, "Utility", "doubleToLong", true);
    addTransform(doubleType, floatobjType, "Utility", "doubleToFloat", true);
    addTransform(doubleType, doubleobjType, "Double", "valueOf", true);

    // Object -> primitives (unboxing via instance methods, hence the false flag)
    addTransform(objectType, booleanType, "Boolean", "booleanValue", false);
    addTransform(objectType, byteType, "Number", "byteValue", false);
    addTransform(objectType, shortType, "Number", "shortValue", false);
    addTransform(objectType, charType, "Character", "charValue", false);
    addTransform(objectType, intType, "Number", "intValue", false);
    addTransform(objectType, longType, "Number", "longValue", false);
    addTransform(objectType, floatType, "Number", "floatValue", false);
    addTransform(objectType, doubleType, "Number", "doubleValue", false);

    // def (dynamic) -> primitives and boxes, via the static Def helpers
    addTransform(defType, booleanType, "Def", "DefToboolean", true);
    addTransform(defType, byteType, "Def", "DefTobyte", true);
    addTransform(defType, shortType, "Def", "DefToshort", true);
    addTransform(defType, charType, "Def", "DefTochar", true);
    addTransform(defType, intType, "Def", "DefToint", true);
    addTransform(defType, longType, "Def", "DefTolong", true);
    addTransform(defType, floatType, "Def", "DefTofloat", true);
    addTransform(defType, doubleType, "Def", "DefTodouble", true);
    addTransform(defType, booleanobjType, "Def", "DefToBoolean", true);
    addTransform(defType, byteobjType, "Def", "DefToByte", true);
    addTransform(defType, shortobjType, "Def", "DefToShort", true);
    addTransform(defType, charobjType, "Def", "DefToCharacter", true);
    addTransform(defType, intobjType, "Def", "DefToInteger", true);
    addTransform(defType, longobjType, "Def", "DefToLong", true);
    addTransform(defType, floatobjType, "Def", "DefToFloat", true);
    addTransform(defType, doubleobjType, "Def", "DefToDouble", true);

    // Number -> *
    addTransform(numberType, booleanType, "Utility", "NumberToboolean", true);
    addTransform(numberType, byteType, "Number", "byteValue", false);
    addTransform(numberType, shortType, "Number", "shortValue", false);
    addTransform(numberType, charType, "Utility", "NumberTochar", true);
    addTransform(numberType, intType, "Number", "intValue", false);
    addTransform(numberType, longType, "Number", "longValue", false);
    addTransform(numberType, floatType, "Number", "floatValue", false);
    addTransform(numberType, doubleType, "Number", "doubleValue", false);
    addTransform(numberType, booleanobjType, "Utility", "NumberToBoolean", true);
    addTransform(numberType, byteobjType, "Utility", "NumberToByte", true);
    addTransform(numberType, shortobjType, "Utility", "NumberToShort", true);
    addTransform(numberType, charobjType, "Utility", "NumberToCharacter", true);
    addTransform(numberType, intobjType, "Utility", "NumberToInteger", true);
    addTransform(numberType, longobjType, "Utility", "NumberToLong", true);
    addTransform(numberType, floatobjType, "Utility", "NumberToFloat", true);
    addTransform(numberType, doubleobjType, "Utility", "NumberToDouble", true);

    // Boolean -> *
    // NOTE(review): Boolean->Number goes through BooleanToLong while the primitive
    // boolean->Number above uses booleanToInteger — inconsistent widths; confirm
    // whether BooleanToInteger was intended here.
    addTransform(booleanobjType, booleanType, "Boolean", "booleanValue", false);
    addTransform(booleanobjType, byteType, "Utility", "BooleanTobyte", true);
    addTransform(booleanobjType, shortType, "Utility", "BooleanToshort", true);
    addTransform(booleanobjType, charType, "Utility", "BooleanTochar", true);
    addTransform(booleanobjType, intType, "Utility", "BooleanToint", true);
    addTransform(booleanobjType, longType, "Utility", "BooleanTolong", true);
    addTransform(booleanobjType, floatType, "Utility", "BooleanTofloat", true);
    addTransform(booleanobjType, doubleType, "Utility", "BooleanTodouble", true);
    addTransform(booleanobjType, numberType, "Utility", "BooleanToLong", true);
    addTransform(booleanobjType, byteobjType, "Utility", "BooleanToByte", true);
    addTransform(booleanobjType, shortobjType, "Utility", "BooleanToShort", true);
    addTransform(booleanobjType, charobjType, "Utility", "BooleanToCharacter", true);
    addTransform(booleanobjType, intobjType, "Utility", "BooleanToInteger", true);
    addTransform(booleanobjType, longobjType, "Utility", "BooleanToLong", true);
    addTransform(booleanobjType, floatobjType, "Utility", "BooleanToFloat", true);
    addTransform(booleanobjType, doubleobjType, "Utility", "BooleanToDouble", true);

    // Byte -> *
    addTransform(byteobjType, booleanType, "Utility", "ByteToboolean", true);
    addTransform(byteobjType, byteType, "Byte", "byteValue", false);
    addTransform(byteobjType, shortType, "Byte", "shortValue", false);
    addTransform(byteobjType, charType, "Utility", "ByteTochar", true);
    addTransform(byteobjType, intType, "Byte", "intValue", false);
    addTransform(byteobjType, longType, "Byte", "longValue", false);
    addTransform(byteobjType, floatType, "Byte", "floatValue", false);
    addTransform(byteobjType, doubleType, "Byte", "doubleValue", false);
    addTransform(byteobjType, booleanobjType, "Utility", "NumberToBoolean", true);
    addTransform(byteobjType, shortobjType, "Utility", "NumberToShort", true);
    addTransform(byteobjType, charobjType, "Utility", "NumberToCharacter", true);
    addTransform(byteobjType, intobjType, "Utility", "NumberToInteger", true);
    addTransform(byteobjType, longobjType, "Utility", "NumberToLong", true);
    addTransform(byteobjType, floatobjType, "Utility", "NumberToFloat", true);
    addTransform(byteobjType, doubleobjType, "Utility", "NumberToDouble", true);

    // Short -> *
    addTransform(shortobjType, booleanType, "Utility", "ShortToboolean", true);
    addTransform(shortobjType, byteType, "Short", "byteValue", false);
    addTransform(shortobjType, shortType, "Short", "shortValue", false);
    addTransform(shortobjType, charType, "Utility", "ShortTochar", true);
    addTransform(shortobjType, intType, "Short", "intValue", false);
    addTransform(shortobjType, longType, "Short", "longValue", false);
    addTransform(shortobjType, floatType, "Short", "floatValue", false);
    addTransform(shortobjType, doubleType, "Short", "doubleValue", false);
    addTransform(shortobjType, booleanobjType, "Utility", "NumberToBoolean", true);
    addTransform(shortobjType, byteobjType, "Utility", "NumberToByte", true);
    addTransform(shortobjType, charobjType, "Utility", "NumberToCharacter", true);
    addTransform(shortobjType, intobjType, "Utility", "NumberToInteger", true);
    addTransform(shortobjType, longobjType, "Utility", "NumberToLong", true);
    addTransform(shortobjType, floatobjType, "Utility", "NumberToFloat", true);
    addTransform(shortobjType, doubleobjType, "Utility", "NumberToDouble", true);

    // Character -> *
    addTransform(charobjType, booleanType, "Utility", "CharacterToboolean", true);
    addTransform(charobjType, byteType, "Utility", "CharacterTobyte", true);
    addTransform(charobjType, shortType, "Utility", "CharacterToshort", true);
    addTransform(charobjType, charType, "Character", "charValue", false);
    addTransform(charobjType, intType, "Utility", "CharacterToint", true);
    addTransform(charobjType, longType, "Utility", "CharacterTolong", true);
    addTransform(charobjType, floatType, "Utility", "CharacterTofloat", true);
    addTransform(charobjType, doubleType, "Utility", "CharacterTodouble", true);
    addTransform(charobjType, booleanobjType, "Utility", "CharacterToBoolean", true);
    addTransform(charobjType, byteobjType, "Utility", "CharacterToByte", true);
    addTransform(charobjType, shortobjType, "Utility", "CharacterToShort", true);
    addTransform(charobjType, intobjType, "Utility", "CharacterToInteger", true);
    addTransform(charobjType, longobjType, "Utility", "CharacterToLong", true);
    addTransform(charobjType, floatobjType, "Utility", "CharacterToFloat", true);
    addTransform(charobjType, doubleobjType, "Utility", "CharacterToDouble", true);

    // Integer -> *
    addTransform(intobjType, booleanType, "Utility", "IntegerToboolean", true);
    addTransform(intobjType, byteType, "Integer", "byteValue", false);
    addTransform(intobjType, shortType, "Integer", "shortValue", false);
    addTransform(intobjType, charType, "Utility", "IntegerTochar", true);
    addTransform(intobjType, intType, "Integer", "intValue", false);
    addTransform(intobjType, longType, "Integer", "longValue", false);
    addTransform(intobjType, floatType, "Integer", "floatValue", false);
    addTransform(intobjType, doubleType, "Integer", "doubleValue", false);
    addTransform(intobjType, booleanobjType, "Utility", "NumberToBoolean", true);
    addTransform(intobjType, byteobjType, "Utility", "NumberToByte", true);
    addTransform(intobjType, shortobjType, "Utility", "NumberToShort", true);
    addTransform(intobjType, charobjType, "Utility", "NumberToCharacter", true);
    addTransform(intobjType, longobjType, "Utility", "NumberToLong", true);
    addTransform(intobjType, floatobjType, "Utility", "NumberToFloat", true);
    addTransform(intobjType, doubleobjType, "Utility", "NumberToDouble", true);

    // Long -> *
    addTransform(longobjType, booleanType, "Utility", "LongToboolean", true);
    addTransform(longobjType, byteType, "Long", "byteValue", false);
    addTransform(longobjType, shortType, "Long", "shortValue", false);
    addTransform(longobjType, charType, "Utility", "LongTochar", true);
    addTransform(longobjType, intType, "Long", "intValue", false);
    addTransform(longobjType, longType, "Long", "longValue", false);
    addTransform(longobjType, floatType, "Long", "floatValue", false);
    addTransform(longobjType, doubleType, "Long", "doubleValue", false);
    addTransform(longobjType, booleanobjType, "Utility", "NumberToBoolean", true);
    addTransform(longobjType, byteobjType, "Utility", "NumberToByte", true);
    addTransform(longobjType, shortobjType, "Utility", "NumberToShort", true);
    addTransform(longobjType, charobjType, "Utility", "NumberToCharacter", true);
    addTransform(longobjType, intobjType, "Utility", "NumberToInteger", true);
    addTransform(longobjType, floatobjType, "Utility", "NumberToFloat", true);
    addTransform(longobjType, doubleobjType, "Utility", "NumberToDouble", true);

    // Float -> *
    addTransform(floatobjType, booleanType, "Utility", "FloatToboolean", true);
    addTransform(floatobjType, byteType, "Float", "byteValue", false);
    addTransform(floatobjType, shortType, "Float", "shortValue", false);
    addTransform(floatobjType, charType, "Utility", "FloatTochar", true);
    addTransform(floatobjType, intType, "Float", "intValue", false);
    addTransform(floatobjType, longType, "Float", "longValue", false);
    addTransform(floatobjType, floatType, "Float", "floatValue", false);
    addTransform(floatobjType, doubleType, "Float", "doubleValue", false);
    addTransform(floatobjType, booleanobjType, "Utility", "NumberToBoolean", true);
    addTransform(floatobjType, byteobjType, "Utility", "NumberToByte", true);
    addTransform(floatobjType, shortobjType, "Utility", "NumberToShort", true);
    addTransform(floatobjType, charobjType, "Utility", "NumberToCharacter", true);
    addTransform(floatobjType, intobjType, "Utility", "NumberToInteger", true);
    addTransform(floatobjType, longobjType, "Utility", "NumberToLong", true);
    addTransform(floatobjType, doubleobjType, "Utility", "NumberToDouble", true);

    // Double -> *
    addTransform(doubleobjType, booleanType, "Utility", "DoubleToboolean", true);
    addTransform(doubleobjType, byteType, "Double", "byteValue", false);
    addTransform(doubleobjType, shortType, "Double", "shortValue", false);
    addTransform(doubleobjType, charType, "Utility", "DoubleTochar", true);
    addTransform(doubleobjType, intType, "Double", "intValue", false);
    addTransform(doubleobjType, longType, "Double", "longValue", false);
    addTransform(doubleobjType, floatType, "Double", "floatValue", false);
    addTransform(doubleobjType, doubleType, "Double", "doubleValue", false);
    addTransform(doubleobjType, booleanobjType, "Utility", "NumberToBoolean", true);
    addTransform(doubleobjType, byteobjType, "Utility", "NumberToByte", true);
    addTransform(doubleobjType, shortobjType, "Utility", "NumberToShort", true);
    addTransform(doubleobjType, charobjType, "Utility", "NumberToCharacter", true);
    addTransform(doubleobjType, intobjType, "Utility", "NumberToInteger", true);
    addTransform(doubleobjType, longobjType, "Utility", "NumberToLong", true);
    addTransform(doubleobjType, floatobjType, "Utility", "NumberToFloat", true);
}

/**
 * Registers upper-bound relations between pairs of types: for (a, b), the third
 * argument is the common type the pair resolves to. The numeric boxes all bound to
 * Number; the collection entries (olist/omap/smap/… variants) pair each generic
 * flavor with its raw/other-flavored counterparts.
 */
private void addDefaultBounds() {
    addBound(byteobjType, numberType, numberType);

    addBound(shortobjType, numberType, numberType);
    addBound(shortobjType, byteobjType, numberType);

    addBound(intobjType, numberType, numberType);
    addBound(intobjType, byteobjType, numberType);
    addBound(intobjType, shortobjType, numberType);

    addBound(longobjType, numberType, numberType);
    addBound(longobjType, byteobjType, numberType);
    addBound(longobjType, shortobjType, numberType);
    addBound(longobjType, intobjType, numberType);

    addBound(floatobjType, numberType, numberType);
    addBound(floatobjType, byteobjType, numberType);
    addBound(floatobjType, shortobjType, numberType);
    addBound(floatobjType, intobjType, numberType);
    addBound(floatobjType, longobjType, numberType);

    addBound(doubleobjType, numberType, numberType);
    addBound(doubleobjType, byteobjType, numberType);
    addBound(doubleobjType, shortobjType, numberType);
    addBound(doubleobjType, intobjType, numberType);
    addBound(doubleobjType, longobjType, numberType);
    addBound(doubleobjType, floatobjType, numberType);

    addBound(stringType, charseqType, charseqType);

    addBound(arraylistType, listType, listType);
    addBound(olistType, listType, listType);
    addBound(olistType, arraylistType, listType);
    addBound(oarraylistType, listType, listType);
    addBound(oarraylistType, olistType, olistType);
    addBound(oarraylistType, arraylistType, arraylistType);

    addBound(hashmapType, mapType, mapType);
    addBound(omapType, mapType, mapType);
    addBound(omapType, hashmapType, mapType);
    addBound(ohashmapType, mapType, mapType);
    addBound(ohashmapType, hashmapType, hashmapType);
    addBound(ohashmapType, omapType, omapType);
    addBound(smapType, mapType, mapType);
    addBound(smapType, hashmapType, mapType);
    addBound(smapType, omapType, omapType);
    addBound(smapType, ohashmapType, omapType);
    addBound(shashmapType, mapType, mapType);
    addBound(shashmapType, hashmapType, hashmapType);
    addBound(shashmapType, omapType, omapType);
    addBound(shashmapType, ohashmapType, ohashmapType);
    addBound(shashmapType, smapType, smapType);
    addBound(somapType, mapType, mapType);
    addBound(somapType, hashmapType, mapType);
    addBound(somapType, omapType, omapType);
    addBound(somapType, ohashmapType, omapType);
    addBound(somapType, smapType, smapType);
    addBound(somapType, shashmapType, smapType);
    addBound(sohashmapType, mapType, mapType);
    addBound(sohashmapType, hashmapType, hashmapType);
    addBound(sohashmapType, omapType, omapType);
    addBound(sohashmapType, ohashmapType, ohashmapType);
    addBound(sohashmapType, smapType, smapType);
    addBound(sohashmapType, shashmapType, shashmapType);
    addBound(sohashmapType, somapType, somapType);
}

/**
 * Registers a new struct under {@code name}, backed by the given Java class.
 * The name regex allows '<', '>' and ',' so parameterized names like List&lt;Object&gt;
 * are legal struct names.
 *
 * NOTE(review): the raw {@code Class} parameter was likely {@code Class<?>} before
 * angle brackets were stripped from this chunk — verify against the original.
 *
 * @throws IllegalArgumentException if the name is malformed or already registered
 */
public final void addStruct(final String name, final Class clazz) {
    if (!name.matches("^[_a-zA-Z][<>,_a-zA-Z0-9]*$")) {
        throw new IllegalArgumentException("Invalid struct name [" + name + "].");
    }

    if (structs.containsKey(name)) {
        throw new IllegalArgumentException("Duplicate struct name [" + name + "].");
    }

    final Struct struct = new Struct(name, clazz, org.objectweb.asm.Type.getType(clazz));

    structs.put(name, struct);
}

/**
 * Marks a previously-added struct as resolvable by its Java class (used for dynamic
 * dispatch): maps struct.clazz -> struct in the {@code classes} map.
 *
 * @throws IllegalArgumentException if the struct is unknown or its class is already mapped
 */
public final void addClass(final String name) {
    final Struct struct = structs.get(name);

    if (struct == null) {
        throw new IllegalArgumentException("Struct [" + name + "] is not defined.");
    }

    if (classes.containsKey(struct.clazz)) {
        throw new IllegalArgumentException("Duplicate struct class [" + struct.clazz + "] when defining dynamic.");
    }

    classes.put(struct.clazz, struct);
}

/**
 * Registers a constructor on an existing struct, resolving it reflectively against the
 * struct's backing Java class.
 *
 * @param struct  name of the owning struct (must already be registered)
 * @param name    whitelist name for the constructor (no '<'/'>' allowed here, unlike structs)
 * @param args    declared argument types; used to locate the reflective constructor
 * @param genargs optional generic argument types; when non-null each must be a subclass
 *                of the corresponding {@code args} entry and is what callers see
 * @throws IllegalArgumentException on unknown owner, bad name, duplicate, name clash
 *                                  with a function/method, or unresolvable constructor
 * @throws ClassCastException       if a generic argument is not a subclass of its raw argument
 */
public final void addConstructor(final String struct, final String name, final Type[] args, final Type[] genargs) {
    final Struct owner = structs.get(struct);

    if (owner == null) {
        throw new IllegalArgumentException(
            "Owner struct [" + struct + "] not defined for constructor [" + name + "].");
    }

    if (!name.matches("^[_a-zA-Z][_a-zA-Z0-9]*$")) {
        throw new IllegalArgumentException(
            "Invalid constructor name [" + name + "] with the struct [" + owner.name + "].");
    }

    if (owner.constructors.containsKey(name)) {
        throw new IllegalArgumentException(
            "Duplicate constructor name [" + name + "] found within the struct [" + owner.name + "].");
    }

    if (owner.statics.containsKey(name)) {
        throw new IllegalArgumentException("Constructors and functions may not have the same name" +
            " [" + name + "] within the same struct [" + owner.name + "].");
    }

    if (owner.methods.containsKey(name)) {
        throw new IllegalArgumentException("Constructors and methods may not have the same name" +
            " [" + name + "] within the same struct [" + owner.name + "].");
    }

    // Collect the raw Java classes of the declared arguments; while doing so, verify
    // every generic argument is assignable to its raw counterpart (asSubclass is used
    // purely for the check — its return value is intentionally discarded).
    final Class[] classes = new Class[args.length];

    for (int count = 0; count < classes.length; ++count) {
        if (genargs != null) {
            try {
                genargs[count].clazz.asSubclass(args[count].clazz);
            } catch (ClassCastException exception) {
                // NOTE(review): "[" + name + " ]" below has a stray space before the
                // closing bracket — looks like a message typo; also the original cause
                // is dropped. Left byte-identical here.
                throw new ClassCastException("Generic argument [" + genargs[count].name + "]" +
                    " is not a sub class of [" + args[count].name + "] in the constructor" +
                    " [" + name + " ] from the struct [" + owner.name + "].");
            }
        }

        classes[count] = args[count].clazz;
    }

    final java.lang.reflect.Constructor reflect;

    try {
        reflect = owner.clazz.getConstructor(classes);
    } catch (NoSuchMethodException exception) {
        throw new IllegalArgumentException("Constructor [" + name + "] not found for class" +
            " [" + owner.clazz.getName() + "] with arguments " + Arrays.toString(classes) + ".");
    }

    // Prefer the generic signature for callers when one was supplied; the ASM method
    // descriptor is always derived from the real reflective constructor.
    final org.objectweb.asm.commons.Method asm = org.objectweb.asm.commons.Method.getMethod(reflect);
    final Constructor constructor =
        new Constructor(name, owner, Arrays.asList(genargs != null ? genargs : args), asm, reflect);

    owner.constructors.put(name, constructor);
}
"functions" : "methods") + + " may not have the same name [" + name + "] within the same struct" + + " [" + owner.name + "]."); + } + + if (owner.statics.containsKey(name)) { + if (statik) { + throw new IllegalArgumentException( + "Duplicate function name [" + name + "] found within the struct [" + owner.name + "]."); + } else { + throw new IllegalArgumentException("Functions and methods may not have the same name" + + " [" + name + "] within the same struct [" + owner.name + "]."); + } + } + + if (owner.methods.containsKey(name)) { + if (statik) { + throw new IllegalArgumentException("Functions and methods may not have the same name" + + " [" + name + "] within the same struct [" + owner.name + "]."); + } else { + throw new IllegalArgumentException("Duplicate method name [" + name + "]" + + " found within the struct [" + owner.name + "]."); + } + } + + if (genrtn != null) { + try { + genrtn.clazz.asSubclass(rtn.clazz); + } catch (ClassCastException exception) { + throw new ClassCastException("Generic return [" + genrtn.clazz.getCanonicalName() + "]" + + " is not a sub class of [" + rtn.clazz.getCanonicalName() + "] in the method" + + " [" + name + " ] from the struct [" + owner.name + "]."); + } + } + + if (genargs != null && genargs.length != args.length) { + throw new IllegalArgumentException("Generic arguments arity [" + genargs.length + "] is not the same as " + + (statik ? "function" : "method") + " [" + name + "] arguments arity" + + " [" + args.length + "] within the struct [" + owner.name + "]."); + } + + final Class[] classes = new Class[args.length]; + + for (int count = 0; count < classes.length; ++count) { + if (genargs != null) { + try { + genargs[count].clazz.asSubclass(args[count].clazz); + } catch (ClassCastException exception) { + throw new ClassCastException("Generic argument [" + genargs[count].name + "] is not a sub class" + + " of [" + args[count].name + "] in the " + (statik ? 
"function" : "method") + + " [" + name + " ] from the struct [" + owner.name + "]."); + } + } + + classes[count] = args[count].clazz; + } + + final java.lang.reflect.Method reflect; + + try { + reflect = owner.clazz.getMethod(alias == null ? name : alias, classes); + } catch (NoSuchMethodException exception) { + throw new IllegalArgumentException((statik ? "Function" : "Method") + + " [" + (alias == null ? name : alias) + "] not found for class [" + owner.clazz.getName() + "]" + + " with arguments " + Arrays.toString(classes) + "."); + } + + if (!reflect.getReturnType().equals(rtn.clazz)) { + throw new IllegalArgumentException("Specified return type class [" + rtn.clazz + "]" + + " does not match the found return type class [" + reflect.getReturnType() + "] for the " + + (statik ? "function" : "method") + " [" + name + "]" + + " within the struct [" + owner.name + "]."); + } + + final org.objectweb.asm.commons.Method asm = org.objectweb.asm.commons.Method.getMethod(reflect); + + MethodHandle handle; + + try { + if (statik) { + handle = MethodHandles.publicLookup().in(owner.clazz).findStatic( + owner.clazz, alias == null ? name : alias, MethodType.methodType(rtn.clazz, classes)); + } else { + handle = MethodHandles.publicLookup().in(owner.clazz).findVirtual( + owner.clazz, alias == null ? name : alias, MethodType.methodType(rtn.clazz, classes)); + } + } catch (NoSuchMethodException | IllegalAccessException exception) { + throw new IllegalArgumentException("Method [" + (alias == null ? name : alias) + "]" + + " not found for class [" + owner.clazz.getName() + "]" + + " with arguments " + Arrays.toString(classes) + "."); + } + + final Method method = new Method(name, owner, genrtn != null ? genrtn : rtn, + Arrays.asList(genargs != null ? 
genargs : args), asm, reflect, handle); + final int modifiers = reflect.getModifiers(); + + if (statik) { + if (!java.lang.reflect.Modifier.isStatic(modifiers)) { + throw new IllegalArgumentException("Function [" + name + "]" + + " within the struct [" + owner.name + "] is not linked to a static Java method."); + } + + owner.functions.put(name, method); + } else { + if (java.lang.reflect.Modifier.isStatic(modifiers)) { + throw new IllegalArgumentException("Method [" + name + "]" + + " within the struct [" + owner.name + "] is not linked to a non-static Java method."); + } + + owner.methods.put(name, method); + } + } + + public final void addField(final String struct, final String name, final String alias, + final boolean statik, final Type type, final Type generic) { + final Struct owner = structs.get(struct); + + if (owner == null) { + throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for " + + (statik ? "static" : "member") + " [" + name + "]."); + } + + if (!name.matches("^[_a-zA-Z][_a-zA-Z0-9]*$")) { + throw new IllegalArgumentException("Invalid " + (statik ? 
"static" : "member") + + " name [" + name + "] with the struct [" + owner.name + "]."); + } + + if (owner.statics.containsKey(name)) { + if (statik) { + throw new IllegalArgumentException("Duplicate static name [" + name + "]" + + " found within the struct [" + owner.name + "]."); + } else { + throw new IllegalArgumentException("Statics and members may not have the same name " + + "[" + name + "] within the same struct [" + owner.name + "]."); + } + } + + if (owner.members.containsKey(name)) { + if (statik) { + throw new IllegalArgumentException("Statics and members may not have the same name " + + "[" + name + "] within the same struct [" + owner.name + "]."); + } else { + throw new IllegalArgumentException("Duplicate member name [" + name + "]" + + " found within the struct [" + owner.name + "]."); + } + } + + if (generic != null) { + try { + generic.clazz.asSubclass(type.clazz); + } catch (ClassCastException exception) { + throw new ClassCastException("Generic type [" + generic.clazz.getCanonicalName() + "]" + + " is not a sub class of [" + type.clazz.getCanonicalName() + "] for the field" + + " [" + name + " ] from the struct [" + owner.name + "]."); + } + } + + java.lang.reflect.Field reflect; + + try { + reflect = owner.clazz.getField(alias == null ? name : alias); + } catch (NoSuchFieldException exception) { + throw new IllegalArgumentException("Field [" + (alias == null ? name : alias) + "]" + + " not found for class [" + owner.clazz.getName() + "]."); + } + + MethodHandle getter = null; + MethodHandle setter = null; + + try { + if (!statik) { + getter = MethodHandles.publicLookup().in(owner.clazz).findGetter( + owner.clazz, alias == null ? name : alias, type.clazz); + setter = MethodHandles.publicLookup().in(owner.clazz).findSetter( + owner.clazz, alias == null ? name : alias, type.clazz); + } + } catch (NoSuchFieldException | IllegalAccessException exception) { + throw new IllegalArgumentException("Getter/Setter [" + (alias == null ? 
name : alias) + "]" + + " not found for class [" + owner.clazz.getName() + "]."); + } + + final Field field = new Field(name, owner, generic == null ? type : generic, type, reflect, getter, setter); + final int modifiers = reflect.getModifiers(); + + if (statik) { + if (!java.lang.reflect.Modifier.isStatic(modifiers)) { + throw new IllegalArgumentException(); + } + + if (!java.lang.reflect.Modifier.isFinal(modifiers)) { + throw new IllegalArgumentException("Static [" + name + "]" + + " within the struct [" + owner.name + "] is not linked to static Java field."); + } + + owner.statics.put(alias == null ? name : alias, field); + } else { + if (java.lang.reflect.Modifier.isStatic(modifiers)) { + throw new IllegalArgumentException("Member [" + name + "]" + + " within the struct [" + owner.name + "] is not linked to non-static Java field."); + } + + owner.members.put(alias == null ? name : alias, field); + } + } + + public final void copyStruct(final String struct, final String... children) { + final Struct owner = structs.get(struct); + + if (owner == null) { + throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for copy."); + } + + for (int count = 0; count < children.length; ++count) { + final Struct child = structs.get(children[count]); + + if (struct == null) { + throw new IllegalArgumentException("Child struct [" + children[count] + "]" + + " not defined for copy to owner struct [" + owner.name + "]."); + } + + try { + owner.clazz.asSubclass(child.clazz); + } catch (ClassCastException exception) { + throw new ClassCastException("Child struct [" + child.name + "]" + + " is not a super type of owner struct [" + owner.name + "] in copy."); + } + + final boolean object = child.clazz.equals(Object.class) && + java.lang.reflect.Modifier.isInterface(owner.clazz.getModifiers()); + + for (final Method method : child.methods.values()) { + if (owner.methods.get(method.name) == null) { + final Class clazz = object ? 
Object.class : owner.clazz; + + java.lang.reflect.Method reflect; + MethodHandle handle; + + try { + reflect = clazz.getMethod(method.method.getName(), method.reflect.getParameterTypes()); + } catch (NoSuchMethodException exception) { + throw new IllegalArgumentException("Method [" + method.method.getName() + "] not found for" + + " class [" + owner.clazz.getName() + "] with arguments " + + Arrays.toString(method.reflect.getParameterTypes()) + "."); + } + + try { + handle = MethodHandles.publicLookup().in(owner.clazz).findVirtual( + owner.clazz, method.method.getName(), + MethodType.methodType(method.reflect.getReturnType(), method.reflect.getParameterTypes())); + } catch (NoSuchMethodException | IllegalAccessException exception) { + throw new IllegalArgumentException("Method [" + method.method.getName() + "] not found for" + + " class [" + owner.clazz.getName() + "] with arguments " + + Arrays.toString(method.reflect.getParameterTypes()) + "."); + } + + owner.methods.put(method.name, + new Method(method.name, owner, method.rtn, method.arguments, method.method, reflect, handle)); + } + } + + for (final Field field : child.members.values()) { + if (owner.members.get(field.name) == null) { + java.lang.reflect.Field reflect; + MethodHandle getter; + MethodHandle setter; + + try { + reflect = owner.clazz.getField(field.reflect.getName()); + } catch (NoSuchFieldException exception) { + throw new IllegalArgumentException("Field [" + field.reflect.getName() + "]" + + " not found for class [" + owner.clazz.getName() + "]."); + } + + try { + getter = MethodHandles.publicLookup().in(owner.clazz).findGetter( + owner.clazz, field.name, field.type.clazz); + setter = MethodHandles.publicLookup().in(owner.clazz).findSetter( + owner.clazz, field.name, field.type.clazz); + } catch (NoSuchFieldException | IllegalAccessException exception) { + throw new IllegalArgumentException("Getter/Setter [" + field.name + "]" + + " not found for class [" + owner.clazz.getName() + "]."); + } + + 
owner.members.put(field.name, + new Field(field.name, owner, field.type, field.generic, reflect, getter, setter)); + } + } + } + } + + public final void addTransform(final Type from, final Type to, final String struct, + final String name, final boolean statik) { + final Struct owner = structs.get(struct); + + if (owner == null) { + throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for" + + " transform with cast type from [" + from.name + "] and cast type to [" + to.name + "]."); + } + + if (from.equals(to)) { + throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "] cannot" + + " have cast type from [" + from.name + "] be the same as cast type to [" + to.name + "]."); + } + + final Cast cast = new Cast(from, to); + + if (transforms.containsKey(cast)) { + throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + + " and cast type from [" + from.name + "] to cast type to [" + to.name + "] already defined."); + } + + Method method; + Type upcast = null; + Type downcast = null; + + if (statik) { + method = owner.functions.get(name); + + if (method == null) { + throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + + " and cast type from [" + from.name + "] to cast type to [" + to.name + + "] using a function [" + name + "] that is not defined."); + } + + if (method.arguments.size() != 1) { + throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + + " and cast type from [" + from.name + "] to cast type to [" + to.name + + "] using function [" + name + "] does not have a single type argument."); + } + + Type argument = method.arguments.get(0); + + try { + from.clazz.asSubclass(argument.clazz); + } catch (ClassCastException cce0) { + try { + argument.clazz.asSubclass(from.clazz); + upcast = argument; + } catch (ClassCastException cce1) { + throw new ClassCastException("Transform with owner struct [" + owner.name + "]" + + 
" and cast type from [" + from.name + "] to cast type to [" + to.name + "] using" + + " function [" + name + "] cannot cast from type to the function input argument type."); + } + } + + final Type rtn = method.rtn; + + try { + rtn.clazz.asSubclass(to.clazz); + } catch (ClassCastException cce0) { + try { + to.clazz.asSubclass(rtn.clazz); + downcast = to; + } catch (ClassCastException cce1) { + throw new ClassCastException("Transform with owner struct [" + owner.name + "]" + + " and cast type from [" + from.name + "] to cast type to [" + to.name + "] using" + + " function [" + name + "] cannot cast to type to the function return argument type."); + } + } + } else { + method = owner.methods.get(name); + + if (method == null) { + throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + + " and cast type from [" + from.name + "] to cast type to [" + to.name + + "] using a method [" + name + "] that is not defined."); + } + + if (!method.arguments.isEmpty()) { + throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + + " and cast type from [" + from.name + "] to cast type to [" + to.name + + "] using method [" + name + "] does not have a single type argument."); + } + + try { + from.clazz.asSubclass(owner.clazz); + } catch (ClassCastException cce0) { + try { + owner.clazz.asSubclass(from.clazz); + upcast = getType(owner.name); + } catch (ClassCastException cce1) { + throw new ClassCastException("Transform with owner struct [" + owner.name + "]" + + " and cast type from [" + from.name + "] to cast type to [" + to.name + "] using" + + " method [" + name + "] cannot cast from type to the method input argument type."); + } + } + + final Type rtn = method.rtn; + + try { + rtn.clazz.asSubclass(to.clazz); + } catch (ClassCastException cce0) { + try { + to.clazz.asSubclass(rtn.clazz); + downcast = to; + } catch (ClassCastException cce1) { + throw new ClassCastException("Transform with owner struct [" + owner.name + 
"]" + + " and cast type from [" + from.name + "] to cast type to [" + to.name + "]" + + " using method [" + name + "] cannot cast to type to the method return argument type."); + } + } + } + + final Transform transform = new Transform(cast, method, upcast, downcast); + transforms.put(cast, transform); + } + + public final void addBound(final Type type0, final Type type1, final Type bound) { + final Pair pair0 = new Pair(type0, type1); + final Pair pair1 = new Pair(type1, type0); + + if (bounds.containsKey(pair0)) { + throw new IllegalArgumentException( + "Bound already defined for types [" + type0.name + "] and [" + type1.name + "]."); + } + + if (bounds.containsKey(pair1)) { + throw new IllegalArgumentException( + "Bound already defined for types [" + type1.name + "] and [" + type0.name + "]."); + } + + bounds.put(pair0, bound); + bounds.put(pair1, bound); + } + + Type getType(final String name) { + final int dimensions = getDimensions(name); + final String structstr = dimensions == 0 ? 
name : name.substring(0, name.indexOf('[')); + final Struct struct = structs.get(structstr); + + if (struct == null) { + throw new IllegalArgumentException("The struct with name [" + name + "] has not been defined."); + } + + return getType(struct, dimensions); + } + + Type getType(final Struct struct, final int dimensions) { + String name = struct.name; + org.objectweb.asm.Type type = struct.type; + Class clazz = struct.clazz; + Sort sort; + + if (dimensions > 0) { + final StringBuilder builder = new StringBuilder(name); + final char[] brackets = new char[dimensions]; + + for (int count = 0; count < dimensions; ++count) { + builder.append("[]"); + brackets[count] = '['; + } + + final String descriptor = new String(brackets) + struct.type.getDescriptor(); + + name = builder.toString(); + type = org.objectweb.asm.Type.getType(descriptor); + + try { + clazz = Class.forName(type.getInternalName().replace('/', '.')); + } catch (ClassNotFoundException exception) { + throw new IllegalArgumentException("The class [" + type.getInternalName() + "]" + + " could not be found to create type [" + name + "]."); + } + + sort = Sort.ARRAY; + } else if ("def".equals(struct.name)) { + sort = Sort.DEF; + } else { + sort = Sort.OBJECT; + + for (Sort value : Sort.values()) { + if (value.clazz == null) { + continue; + } + + if (value.clazz.equals(struct.clazz)) { + sort = value; + + break; + } + } + } + + return new Type(name, struct, clazz, type, sort); + } + + private int getDimensions(final String name) { + int dimensions = 0; + int index = name.indexOf('['); + + if (index != -1) { + final int length = name.length(); + + while (index < length) { + if (name.charAt(index) == '[' && ++index < length && name.charAt(index++) == ']') { + ++dimensions; + } else { + throw new IllegalArgumentException("Invalid array braces in canonical name [" + name + "]."); + } + } + } + + return dimensions; + } +} diff --git 
a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java new file mode 100644 index 00000000000..95e3c93a354 --- /dev/null +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java @@ -0,0 +1,45 @@ +package org.elasticsearch.plan.a; + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import java.text.ParseException; + +import org.antlr.v4.runtime.CharStream; +import org.antlr.v4.runtime.LexerNoViableAltException; +import org.antlr.v4.runtime.misc.Interval; + +class ErrorHandlingLexer extends PlanALexer { + public ErrorHandlingLexer(CharStream charStream) { + super(charStream); + } + + @Override + public void recover(LexerNoViableAltException lnvae) { + CharStream charStream = lnvae.getInputStream(); + int startIndex = lnvae.getStartIndex(); + String text = charStream.getText(Interval.of(startIndex, charStream.index())); + + ParseException parseException = new ParseException("Error [" + _tokenStartLine + ":" + + _tokenStartCharPositionInLine + "]: unexpected character [" + + getErrorDisplay(text) + "].", _tokenStartCharIndex); + parseException.initCause(lnvae); + throw new RuntimeException(parseException); + } +} diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Executable.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Executable.java new file mode 100644 index 00000000000..09e28cf8216 --- /dev/null +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Executable.java @@ -0,0 +1,50 @@ +package org.elasticsearch.plan.a; + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import java.util.Map; + +public abstract class Executable { + protected final Definition definition; + + private final String name; + private final String source; + + public Executable(final Definition definition, final String name, final String source) { + this.definition = definition; + + this.name = name; + this.source = source; + } + + public String getName() { + return name; + } + + public String getSource() { + return source; + } + + public Definition getDefinition() { + return definition; + } + + public abstract Object execute(Map input); +} diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ParserErrorStrategy.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ParserErrorStrategy.java new file mode 100644 index 00000000000..3fe36034792 --- /dev/null +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ParserErrorStrategy.java @@ -0,0 +1,74 @@ +package org.elasticsearch.plan.a; + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import java.text.ParseException; + +import org.antlr.v4.runtime.DefaultErrorStrategy; +import org.antlr.v4.runtime.InputMismatchException; +import org.antlr.v4.runtime.NoViableAltException; +import org.antlr.v4.runtime.Parser; +import org.antlr.v4.runtime.RecognitionException; +import org.antlr.v4.runtime.Token; + +class ParserErrorStrategy extends DefaultErrorStrategy { + @Override + public void recover(Parser recognizer, RecognitionException re) { + Token token = re.getOffendingToken(); + String message; + + if (token == null) { + message = "Error: no parse token found."; + } else if (re instanceof InputMismatchException) { + message = "Error[" + token.getLine() + ":" + token.getCharPositionInLine() + "]:" + + " unexpected token [" + getTokenErrorDisplay(token) + "]" + + " was expecting one of [" + re.getExpectedTokens().toString(recognizer.getVocabulary()) + "]."; + } else if (re instanceof NoViableAltException) { + if (token.getType() == PlanAParser.EOF) { + message = "Error: unexpected end of script."; + } else { + message = "Error[" + token.getLine() + ":" + token.getCharPositionInLine() + "]:" + + "invalid sequence of tokens near [" + getTokenErrorDisplay(token) + "]."; + } + } else { + message = "Error[" + token.getLine() + ":" + token.getCharPositionInLine() + "]:" + + " unexpected token near [" + getTokenErrorDisplay(token) + "]."; + } + + ParseException parseException = new ParseException(message, token == null ? 
-1 : token.getStartIndex()); + parseException.initCause(re); + + throw new RuntimeException(parseException); + } + + @Override + public Token recoverInline(Parser recognizer) throws RecognitionException { + Token token = recognizer.getCurrentToken(); + String message = "Error[" + token.getLine() + ":" + token.getCharPositionInLine() + "]:" + + " unexpected token [" + getTokenErrorDisplay(token) + "]" + + " was expecting one of [" + recognizer.getExpectedTokens().toString(recognizer.getVocabulary()) + "]."; + ParseException parseException = new ParseException(message, token.getStartIndex()); + throw new RuntimeException(parseException); + } + + @Override + public void sync(Parser recognizer) { + } +} diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanALexer.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanALexer.java new file mode 100644 index 00000000000..a9e5ff623bf --- /dev/null +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanALexer.java @@ -0,0 +1,390 @@ +// ANTLR GENERATED CODE: DO NOT EDIT +package org.elasticsearch.plan.a; + + import java.util.Set; + +import org.antlr.v4.runtime.Lexer; +import org.antlr.v4.runtime.CharStream; +import org.antlr.v4.runtime.Token; +import org.antlr.v4.runtime.TokenStream; +import org.antlr.v4.runtime.*; +import org.antlr.v4.runtime.atn.*; +import org.antlr.v4.runtime.dfa.DFA; +import org.antlr.v4.runtime.misc.*; + +@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) +class PlanALexer extends Lexer { + static { RuntimeMetaData.checkVersion("4.5.1", RuntimeMetaData.VERSION); } + + protected static final DFA[] _decisionToDFA; + protected static final PredictionContextCache _sharedContextCache = + new PredictionContextCache(); + public static final int + WS=1, COMMENT=2, LBRACK=3, RBRACK=4, LBRACE=5, RBRACE=6, LP=7, RP=8, DOT=9, + COMMA=10, SEMICOLON=11, IF=12, ELSE=13, WHILE=14, DO=15, FOR=16, CONTINUE=17, + BREAK=18, RETURN=19, NEW=20, TRY=21, 
CATCH=22, THROW=23, BOOLNOT=24, BWNOT=25, + MUL=26, DIV=27, REM=28, ADD=29, SUB=30, LSH=31, RSH=32, USH=33, LT=34, + LTE=35, GT=36, GTE=37, EQ=38, EQR=39, NE=40, NER=41, BWAND=42, BWXOR=43, + BWOR=44, BOOLAND=45, BOOLOR=46, COND=47, COLON=48, INCR=49, DECR=50, ASSIGN=51, + AADD=52, ASUB=53, AMUL=54, ADIV=55, AREM=56, AAND=57, AXOR=58, AOR=59, + ALSH=60, ARSH=61, AUSH=62, ACAT=63, OCTAL=64, HEX=65, INTEGER=66, DECIMAL=67, + STRING=68, CHAR=69, TRUE=70, FALSE=71, NULL=72, TYPE=73, ID=74, EXTINTEGER=75, + EXTID=76; + public static final int EXT = 1; + public static String[] modeNames = { + "DEFAULT_MODE", "EXT" + }; + + public static final String[] ruleNames = { + "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", "DOT", + "COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE", + "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "BOOLNOT", "BWNOT", + "MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", + "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND", + "BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", + "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "ACAT", + "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "CHAR", "TRUE", "FALSE", + "NULL", "TYPE", "GENERIC", "ID", "EXTINTEGER", "EXTID" + }; + + private static final String[] _LITERAL_NAMES = { + null, null, null, "'{'", "'}'", "'['", "']'", "'('", "')'", "'.'", "','", + "';'", "'if'", "'else'", "'while'", "'do'", "'for'", "'continue'", "'break'", + "'return'", "'new'", "'try'", "'catch'", "'throw'", "'!'", "'~'", "'*'", + "'/'", "'%'", "'+'", "'-'", "'<<'", "'>>'", "'>>>'", "'<'", "'<='", "'>'", + "'>='", "'=='", "'==='", "'!='", "'!=='", "'&'", "'^'", "'|'", "'&&'", + "'||'", "'?'", "':'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", "'/='", + "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", "'..='", null, + null, null, null, null, null, "'true'", "'false'", "'null'" + }; + private 
static final String[] _SYMBOLIC_NAMES = { + null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", + "DOT", "COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE", + "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "BOOLNOT", "BWNOT", + "MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", + "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND", + "BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", + "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "ACAT", + "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "CHAR", "TRUE", "FALSE", + "NULL", "TYPE", "ID", "EXTINTEGER", "EXTID" + }; + public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); + + /** + * @deprecated Use {@link #VOCABULARY} instead. + */ + @Deprecated + public static final String[] tokenNames; + static { + tokenNames = new String[_SYMBOLIC_NAMES.length]; + for (int i = 0; i < tokenNames.length; i++) { + tokenNames[i] = VOCABULARY.getLiteralName(i); + if (tokenNames[i] == null) { + tokenNames[i] = VOCABULARY.getSymbolicName(i); + } + + if (tokenNames[i] == null) { + tokenNames[i] = ""; + } + } + } + + @Override + @Deprecated + public String[] getTokenNames() { + return tokenNames; + } + + @Override + + public Vocabulary getVocabulary() { + return VOCABULARY; + } + + + private Set types = null; + + void setTypes(Set types) { + this.types = types; + } + + + public PlanALexer(CharStream input) { + super(input); + _interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache); + } + + @Override + public String getGrammarFileName() { return "PlanALexer.g4"; } + + @Override + public String[] getRuleNames() { return ruleNames; } + + @Override + public String getSerializedATN() { return _serializedATN; } + + @Override + public String[] getModeNames() { return modeNames; } + + @Override + public ATN getATN() { return _ATN; } + + @Override + public 
void action(RuleContext _localctx, int ruleIndex, int actionIndex) { + switch (ruleIndex) { + case 67: + STRING_action((RuleContext)_localctx, actionIndex); + break; + case 68: + CHAR_action((RuleContext)_localctx, actionIndex); + break; + case 72: + TYPE_action((RuleContext)_localctx, actionIndex); + break; + } + } + private void STRING_action(RuleContext _localctx, int actionIndex) { + switch (actionIndex) { + case 0: + setText(getText().substring(1, getText().length() - 1)); + break; + } + } + private void CHAR_action(RuleContext _localctx, int actionIndex) { + switch (actionIndex) { + case 1: + setText(getText().substring(1, getText().length() - 1)); + break; + } + } + private void TYPE_action(RuleContext _localctx, int actionIndex) { + switch (actionIndex) { + case 2: + setText(getText().replace(" ", "")); + break; + } + } + @Override + public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { + switch (ruleIndex) { + case 72: + return TYPE_sempred((RuleContext)_localctx, predIndex); + } + return true; + } + private boolean TYPE_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 0: + return types.contains(getText().replace(" ", "")); + } + return true; + } + + public static final String _serializedATN = + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2N\u0236\b\1\b\1\4"+ + "\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n"+ + "\4\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+ + "\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31"+ + "\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t"+ + " \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t"+ + "+\4,\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64"+ + "\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t;\4<\t<\4=\t"+ + "=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4"+ + 
"I\tI\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\3\2\6\2\u00a0\n\2\r\2\16\2\u00a1\3"+ + "\2\3\2\3\3\3\3\3\3\3\3\7\3\u00aa\n\3\f\3\16\3\u00ad\13\3\3\3\3\3\3\3\3"+ + "\3\3\3\7\3\u00b4\n\3\f\3\16\3\u00b7\13\3\3\3\3\3\5\3\u00bb\n\3\3\3\3\3"+ + "\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7\3\b\3\b\3\t\3\t\3\n\3\n\3\n\3\n\3\13"+ + "\3\13\3\f\3\f\3\r\3\r\3\r\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3\17\3\17"+ + "\3\17\3\17\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22"+ + "\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24"+ + "\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\26\3\26\3\26\3\26\3\27\3\27\3\27"+ + "\3\27\3\27\3\27\3\30\3\30\3\30\3\30\3\30\3\30\3\31\3\31\3\32\3\32\3\33"+ + "\3\33\3\34\3\34\3\35\3\35\3\36\3\36\3\37\3\37\3 \3 \3 \3!\3!\3!\3\"\3"+ + "\"\3\"\3\"\3#\3#\3$\3$\3$\3%\3%\3&\3&\3&\3\'\3\'\3\'\3(\3(\3(\3(\3)\3"+ + ")\3)\3*\3*\3*\3*\3+\3+\3,\3,\3-\3-\3.\3.\3.\3/\3/\3/\3\60\3\60\3\61\3"+ + "\61\3\62\3\62\3\62\3\63\3\63\3\63\3\64\3\64\3\65\3\65\3\65\3\66\3\66\3"+ + "\66\3\67\3\67\3\67\38\38\38\39\39\39\3:\3:\3:\3;\3;\3;\3<\3<\3<\3=\3="+ + "\3=\3=\3>\3>\3>\3>\3?\3?\3?\3?\3?\3@\3@\3@\3@\3A\3A\6A\u0185\nA\rA\16"+ + "A\u0186\3A\5A\u018a\nA\3B\3B\3B\6B\u018f\nB\rB\16B\u0190\3B\5B\u0194\n"+ + "B\3C\3C\3C\7C\u0199\nC\fC\16C\u019c\13C\5C\u019e\nC\3C\5C\u01a1\nC\3D"+ + "\3D\3D\7D\u01a6\nD\fD\16D\u01a9\13D\5D\u01ab\nD\3D\3D\7D\u01af\nD\fD\16"+ + "D\u01b2\13D\3D\3D\5D\u01b6\nD\3D\6D\u01b9\nD\rD\16D\u01ba\5D\u01bd\nD"+ + "\3D\5D\u01c0\nD\3E\3E\3E\3E\3E\3E\7E\u01c8\nE\fE\16E\u01cb\13E\3E\3E\3"+ + "E\3F\3F\3F\3F\3F\3G\3G\3G\3G\3G\3H\3H\3H\3H\3H\3H\3I\3I\3I\3I\3I\3J\3"+ + "J\5J\u01e7\nJ\3J\3J\3J\3K\7K\u01ed\nK\fK\16K\u01f0\13K\3K\3K\7K\u01f4"+ + "\nK\fK\16K\u01f7\13K\3K\3K\5K\u01fb\nK\3K\7K\u01fe\nK\fK\16K\u0201\13"+ + "K\3K\3K\7K\u0205\nK\fK\16K\u0208\13K\3K\3K\5K\u020c\nK\3K\7K\u020f\nK"+ + "\fK\16K\u0212\13K\7K\u0214\nK\fK\16K\u0217\13K\3K\3K\3L\3L\7L\u021d\n"+ + "L\fL\16L\u0220\13L\3M\3M\3M\7M\u0225\nM\fM\16M\u0228\13M\5M\u022a\nM\3"+ + 
"M\3M\3N\3N\7N\u0230\nN\fN\16N\u0233\13N\3N\3N\5\u00ab\u00b5\u01c9\2O\4"+ + "\3\6\4\b\5\n\6\f\7\16\b\20\t\22\n\24\13\26\f\30\r\32\16\34\17\36\20 \21"+ + "\"\22$\23&\24(\25*\26,\27.\30\60\31\62\32\64\33\66\348\35:\36<\37> @!"+ + "B\"D#F$H%J&L\'N(P)R*T+V,X-Z.\\/^\60`\61b\62d\63f\64h\65j\66l\67n8p9r:"+ + "t;v|?~@\u0080A\u0082B\u0084C\u0086D\u0088E\u008aF\u008cG\u008eH\u0090"+ + "I\u0092J\u0094K\u0096\2\u0098L\u009aM\u009cN\4\2\3\21\5\2\13\f\17\17\""+ + "\"\4\2\f\f\17\17\3\2\629\4\2NNnn\4\2ZZzz\5\2\62;CHch\3\2\63;\3\2\62;\b"+ + "\2FFHHNNffhhnn\4\2GGgg\4\2--//\4\2HHhh\4\2$$^^\5\2C\\aac|\6\2\62;C\\a"+ + "ac|\u0255\2\4\3\2\2\2\2\6\3\2\2\2\2\b\3\2\2\2\2\n\3\2\2\2\2\f\3\2\2\2"+ + "\2\16\3\2\2\2\2\20\3\2\2\2\2\22\3\2\2\2\2\24\3\2\2\2\2\26\3\2\2\2\2\30"+ + "\3\2\2\2\2\32\3\2\2\2\2\34\3\2\2\2\2\36\3\2\2\2\2 \3\2\2\2\2\"\3\2\2\2"+ + "\2$\3\2\2\2\2&\3\2\2\2\2(\3\2\2\2\2*\3\2\2\2\2,\3\2\2\2\2.\3\2\2\2\2\60"+ + "\3\2\2\2\2\62\3\2\2\2\2\64\3\2\2\2\2\66\3\2\2\2\28\3\2\2\2\2:\3\2\2\2"+ + "\2<\3\2\2\2\2>\3\2\2\2\2@\3\2\2\2\2B\3\2\2\2\2D\3\2\2\2\2F\3\2\2\2\2H"+ + "\3\2\2\2\2J\3\2\2\2\2L\3\2\2\2\2N\3\2\2\2\2P\3\2\2\2\2R\3\2\2\2\2T\3\2"+ + "\2\2\2V\3\2\2\2\2X\3\2\2\2\2Z\3\2\2\2\2\\\3\2\2\2\2^\3\2\2\2\2`\3\2\2"+ + "\2\2b\3\2\2\2\2d\3\2\2\2\2f\3\2\2\2\2h\3\2\2\2\2j\3\2\2\2\2l\3\2\2\2\2"+ + "n\3\2\2\2\2p\3\2\2\2\2r\3\2\2\2\2t\3\2\2\2\2v\3\2\2\2\2x\3\2\2\2\2z\3"+ + "\2\2\2\2|\3\2\2\2\2~\3\2\2\2\2\u0080\3\2\2\2\2\u0082\3\2\2\2\2\u0084\3"+ + "\2\2\2\2\u0086\3\2\2\2\2\u0088\3\2\2\2\2\u008a\3\2\2\2\2\u008c\3\2\2\2"+ + "\2\u008e\3\2\2\2\2\u0090\3\2\2\2\2\u0092\3\2\2\2\2\u0094\3\2\2\2\2\u0098"+ + "\3\2\2\2\3\u009a\3\2\2\2\3\u009c\3\2\2\2\4\u009f\3\2\2\2\6\u00ba\3\2\2"+ + "\2\b\u00be\3\2\2\2\n\u00c0\3\2\2\2\f\u00c2\3\2\2\2\16\u00c4\3\2\2\2\20"+ + "\u00c6\3\2\2\2\22\u00c8\3\2\2\2\24\u00ca\3\2\2\2\26\u00ce\3\2\2\2\30\u00d0"+ + "\3\2\2\2\32\u00d2\3\2\2\2\34\u00d5\3\2\2\2\36\u00da\3\2\2\2 \u00e0\3\2"+ + "\2\2\"\u00e3\3\2\2\2$\u00e7\3\2\2\2&\u00f0\3\2\2\2(\u00f6\3\2\2\2*\u00fd"+ + 
"\3\2\2\2,\u0101\3\2\2\2.\u0105\3\2\2\2\60\u010b\3\2\2\2\62\u0111\3\2\2"+ + "\2\64\u0113\3\2\2\2\66\u0115\3\2\2\28\u0117\3\2\2\2:\u0119\3\2\2\2<\u011b"+ + "\3\2\2\2>\u011d\3\2\2\2@\u011f\3\2\2\2B\u0122\3\2\2\2D\u0125\3\2\2\2F"+ + "\u0129\3\2\2\2H\u012b\3\2\2\2J\u012e\3\2\2\2L\u0130\3\2\2\2N\u0133\3\2"+ + "\2\2P\u0136\3\2\2\2R\u013a\3\2\2\2T\u013d\3\2\2\2V\u0141\3\2\2\2X\u0143"+ + "\3\2\2\2Z\u0145\3\2\2\2\\\u0147\3\2\2\2^\u014a\3\2\2\2`\u014d\3\2\2\2"+ + "b\u014f\3\2\2\2d\u0151\3\2\2\2f\u0154\3\2\2\2h\u0157\3\2\2\2j\u0159\3"+ + "\2\2\2l\u015c\3\2\2\2n\u015f\3\2\2\2p\u0162\3\2\2\2r\u0165\3\2\2\2t\u0168"+ + "\3\2\2\2v\u016b\3\2\2\2x\u016e\3\2\2\2z\u0171\3\2\2\2|\u0175\3\2\2\2~"+ + "\u0179\3\2\2\2\u0080\u017e\3\2\2\2\u0082\u0182\3\2\2\2\u0084\u018b\3\2"+ + "\2\2\u0086\u019d\3\2\2\2\u0088\u01aa\3\2\2\2\u008a\u01c1\3\2\2\2\u008c"+ + "\u01cf\3\2\2\2\u008e\u01d4\3\2\2\2\u0090\u01d9\3\2\2\2\u0092\u01df\3\2"+ + "\2\2\u0094\u01e4\3\2\2\2\u0096\u01ee\3\2\2\2\u0098\u021a\3\2\2\2\u009a"+ + "\u0229\3\2\2\2\u009c\u022d\3\2\2\2\u009e\u00a0\t\2\2\2\u009f\u009e\3\2"+ + "\2\2\u00a0\u00a1\3\2\2\2\u00a1\u009f\3\2\2\2\u00a1\u00a2\3\2\2\2\u00a2"+ + "\u00a3\3\2\2\2\u00a3\u00a4\b\2\2\2\u00a4\5\3\2\2\2\u00a5\u00a6\7\61\2"+ + "\2\u00a6\u00a7\7\61\2\2\u00a7\u00ab\3\2\2\2\u00a8\u00aa\13\2\2\2\u00a9"+ + "\u00a8\3\2\2\2\u00aa\u00ad\3\2\2\2\u00ab\u00ac\3\2\2\2\u00ab\u00a9\3\2"+ + "\2\2\u00ac\u00ae\3\2\2\2\u00ad\u00ab\3\2\2\2\u00ae\u00bb\t\3\2\2\u00af"+ + "\u00b0\7\61\2\2\u00b0\u00b1\7,\2\2\u00b1\u00b5\3\2\2\2\u00b2\u00b4\13"+ + "\2\2\2\u00b3\u00b2\3\2\2\2\u00b4\u00b7\3\2\2\2\u00b5\u00b6\3\2\2\2\u00b5"+ + "\u00b3\3\2\2\2\u00b6\u00b8\3\2\2\2\u00b7\u00b5\3\2\2\2\u00b8\u00b9\7,"+ + "\2\2\u00b9\u00bb\7\61\2\2\u00ba\u00a5\3\2\2\2\u00ba\u00af\3\2\2\2\u00bb"+ + "\u00bc\3\2\2\2\u00bc\u00bd\b\3\2\2\u00bd\7\3\2\2\2\u00be\u00bf\7}\2\2"+ + "\u00bf\t\3\2\2\2\u00c0\u00c1\7\177\2\2\u00c1\13\3\2\2\2\u00c2\u00c3\7"+ + "]\2\2\u00c3\r\3\2\2\2\u00c4\u00c5\7_\2\2\u00c5\17\3\2\2\2\u00c6\u00c7"+ + 
"\7*\2\2\u00c7\21\3\2\2\2\u00c8\u00c9\7+\2\2\u00c9\23\3\2\2\2\u00ca\u00cb"+ + "\7\60\2\2\u00cb\u00cc\3\2\2\2\u00cc\u00cd\b\n\3\2\u00cd\25\3\2\2\2\u00ce"+ + "\u00cf\7.\2\2\u00cf\27\3\2\2\2\u00d0\u00d1\7=\2\2\u00d1\31\3\2\2\2\u00d2"+ + "\u00d3\7k\2\2\u00d3\u00d4\7h\2\2\u00d4\33\3\2\2\2\u00d5\u00d6\7g\2\2\u00d6"+ + "\u00d7\7n\2\2\u00d7\u00d8\7u\2\2\u00d8\u00d9\7g\2\2\u00d9\35\3\2\2\2\u00da"+ + "\u00db\7y\2\2\u00db\u00dc\7j\2\2\u00dc\u00dd\7k\2\2\u00dd\u00de\7n\2\2"+ + "\u00de\u00df\7g\2\2\u00df\37\3\2\2\2\u00e0\u00e1\7f\2\2\u00e1\u00e2\7"+ + "q\2\2\u00e2!\3\2\2\2\u00e3\u00e4\7h\2\2\u00e4\u00e5\7q\2\2\u00e5\u00e6"+ + "\7t\2\2\u00e6#\3\2\2\2\u00e7\u00e8\7e\2\2\u00e8\u00e9\7q\2\2\u00e9\u00ea"+ + "\7p\2\2\u00ea\u00eb\7v\2\2\u00eb\u00ec\7k\2\2\u00ec\u00ed\7p\2\2\u00ed"+ + "\u00ee\7w\2\2\u00ee\u00ef\7g\2\2\u00ef%\3\2\2\2\u00f0\u00f1\7d\2\2\u00f1"+ + "\u00f2\7t\2\2\u00f2\u00f3\7g\2\2\u00f3\u00f4\7c\2\2\u00f4\u00f5\7m\2\2"+ + "\u00f5\'\3\2\2\2\u00f6\u00f7\7t\2\2\u00f7\u00f8\7g\2\2\u00f8\u00f9\7v"+ + "\2\2\u00f9\u00fa\7w\2\2\u00fa\u00fb\7t\2\2\u00fb\u00fc\7p\2\2\u00fc)\3"+ + "\2\2\2\u00fd\u00fe\7p\2\2\u00fe\u00ff\7g\2\2\u00ff\u0100\7y\2\2\u0100"+ + "+\3\2\2\2\u0101\u0102\7v\2\2\u0102\u0103\7t\2\2\u0103\u0104\7{\2\2\u0104"+ + "-\3\2\2\2\u0105\u0106\7e\2\2\u0106\u0107\7c\2\2\u0107\u0108\7v\2\2\u0108"+ + "\u0109\7e\2\2\u0109\u010a\7j\2\2\u010a/\3\2\2\2\u010b\u010c\7v\2\2\u010c"+ + "\u010d\7j\2\2\u010d\u010e\7t\2\2\u010e\u010f\7q\2\2\u010f\u0110\7y\2\2"+ + "\u0110\61\3\2\2\2\u0111\u0112\7#\2\2\u0112\63\3\2\2\2\u0113\u0114\7\u0080"+ + "\2\2\u0114\65\3\2\2\2\u0115\u0116\7,\2\2\u0116\67\3\2\2\2\u0117\u0118"+ + "\7\61\2\2\u01189\3\2\2\2\u0119\u011a\7\'\2\2\u011a;\3\2\2\2\u011b\u011c"+ + "\7-\2\2\u011c=\3\2\2\2\u011d\u011e\7/\2\2\u011e?\3\2\2\2\u011f\u0120\7"+ + ">\2\2\u0120\u0121\7>\2\2\u0121A\3\2\2\2\u0122\u0123\7@\2\2\u0123\u0124"+ + "\7@\2\2\u0124C\3\2\2\2\u0125\u0126\7@\2\2\u0126\u0127\7@\2\2\u0127\u0128"+ + "\7@\2\2\u0128E\3\2\2\2\u0129\u012a\7>\2\2\u012aG\3\2\2\2\u012b\u012c\7"+ 
+ ">\2\2\u012c\u012d\7?\2\2\u012dI\3\2\2\2\u012e\u012f\7@\2\2\u012fK\3\2"+ + "\2\2\u0130\u0131\7@\2\2\u0131\u0132\7?\2\2\u0132M\3\2\2\2\u0133\u0134"+ + "\7?\2\2\u0134\u0135\7?\2\2\u0135O\3\2\2\2\u0136\u0137\7?\2\2\u0137\u0138"+ + "\7?\2\2\u0138\u0139\7?\2\2\u0139Q\3\2\2\2\u013a\u013b\7#\2\2\u013b\u013c"+ + "\7?\2\2\u013cS\3\2\2\2\u013d\u013e\7#\2\2\u013e\u013f\7?\2\2\u013f\u0140"+ + "\7?\2\2\u0140U\3\2\2\2\u0141\u0142\7(\2\2\u0142W\3\2\2\2\u0143\u0144\7"+ + "`\2\2\u0144Y\3\2\2\2\u0145\u0146\7~\2\2\u0146[\3\2\2\2\u0147\u0148\7("+ + "\2\2\u0148\u0149\7(\2\2\u0149]\3\2\2\2\u014a\u014b\7~\2\2\u014b\u014c"+ + "\7~\2\2\u014c_\3\2\2\2\u014d\u014e\7A\2\2\u014ea\3\2\2\2\u014f\u0150\7"+ + "<\2\2\u0150c\3\2\2\2\u0151\u0152\7-\2\2\u0152\u0153\7-\2\2\u0153e\3\2"+ + "\2\2\u0154\u0155\7/\2\2\u0155\u0156\7/\2\2\u0156g\3\2\2\2\u0157\u0158"+ + "\7?\2\2\u0158i\3\2\2\2\u0159\u015a\7-\2\2\u015a\u015b\7?\2\2\u015bk\3"+ + "\2\2\2\u015c\u015d\7/\2\2\u015d\u015e\7?\2\2\u015em\3\2\2\2\u015f\u0160"+ + "\7,\2\2\u0160\u0161\7?\2\2\u0161o\3\2\2\2\u0162\u0163\7\61\2\2\u0163\u0164"+ + "\7?\2\2\u0164q\3\2\2\2\u0165\u0166\7\'\2\2\u0166\u0167\7?\2\2\u0167s\3"+ + "\2\2\2\u0168\u0169\7(\2\2\u0169\u016a\7?\2\2\u016au\3\2\2\2\u016b\u016c"+ + "\7`\2\2\u016c\u016d\7?\2\2\u016dw\3\2\2\2\u016e\u016f\7~\2\2\u016f\u0170"+ + "\7?\2\2\u0170y\3\2\2\2\u0171\u0172\7>\2\2\u0172\u0173\7>\2\2\u0173\u0174"+ + "\7?\2\2\u0174{\3\2\2\2\u0175\u0176\7@\2\2\u0176\u0177\7@\2\2\u0177\u0178"+ + "\7?\2\2\u0178}\3\2\2\2\u0179\u017a\7@\2\2\u017a\u017b\7@\2\2\u017b\u017c"+ + "\7@\2\2\u017c\u017d\7?\2\2\u017d\177\3\2\2\2\u017e\u017f\7\60\2\2\u017f"+ + "\u0180\7\60\2\2\u0180\u0181\7?\2\2\u0181\u0081\3\2\2\2\u0182\u0184\7\62"+ + "\2\2\u0183\u0185\t\4\2\2\u0184\u0183\3\2\2\2\u0185\u0186\3\2\2\2\u0186"+ + "\u0184\3\2\2\2\u0186\u0187\3\2\2\2\u0187\u0189\3\2\2\2\u0188\u018a\t\5"+ + "\2\2\u0189\u0188\3\2\2\2\u0189\u018a\3\2\2\2\u018a\u0083\3\2\2\2\u018b"+ + "\u018c\7\62\2\2\u018c\u018e\t\6\2\2\u018d\u018f\t\7\2\2\u018e\u018d\3"+ + 
"\2\2\2\u018f\u0190\3\2\2\2\u0190\u018e\3\2\2\2\u0190\u0191\3\2\2\2\u0191"+ + "\u0193\3\2\2\2\u0192\u0194\t\5\2\2\u0193\u0192\3\2\2\2\u0193\u0194\3\2"+ + "\2\2\u0194\u0085\3\2\2\2\u0195\u019e\7\62\2\2\u0196\u019a\t\b\2\2\u0197"+ + "\u0199\t\t\2\2\u0198\u0197\3\2\2\2\u0199\u019c\3\2\2\2\u019a\u0198\3\2"+ + "\2\2\u019a\u019b\3\2\2\2\u019b\u019e\3\2\2\2\u019c\u019a\3\2\2\2\u019d"+ + "\u0195\3\2\2\2\u019d\u0196\3\2\2\2\u019e\u01a0\3\2\2\2\u019f\u01a1\t\n"+ + "\2\2\u01a0\u019f\3\2\2\2\u01a0\u01a1\3\2\2\2\u01a1\u0087\3\2\2\2\u01a2"+ + "\u01ab\7\62\2\2\u01a3\u01a7\t\b\2\2\u01a4\u01a6\t\t\2\2\u01a5\u01a4\3"+ + "\2\2\2\u01a6\u01a9\3\2\2\2\u01a7\u01a5\3\2\2\2\u01a7\u01a8\3\2\2\2\u01a8"+ + "\u01ab\3\2\2\2\u01a9\u01a7\3\2\2\2\u01aa\u01a2\3\2\2\2\u01aa\u01a3\3\2"+ + "\2\2\u01ab\u01ac\3\2\2\2\u01ac\u01b0\5\24\n\2\u01ad\u01af\t\t\2\2\u01ae"+ + "\u01ad\3\2\2\2\u01af\u01b2\3\2\2\2\u01b0\u01ae\3\2\2\2\u01b0\u01b1\3\2"+ + "\2\2\u01b1\u01bc\3\2\2\2\u01b2\u01b0\3\2\2\2\u01b3\u01b5\t\13\2\2\u01b4"+ + "\u01b6\t\f\2\2\u01b5\u01b4\3\2\2\2\u01b5\u01b6\3\2\2\2\u01b6\u01b8\3\2"+ + "\2\2\u01b7\u01b9\t\t\2\2\u01b8\u01b7\3\2\2\2\u01b9\u01ba\3\2\2\2\u01ba"+ + "\u01b8\3\2\2\2\u01ba\u01bb\3\2\2\2\u01bb\u01bd\3\2\2\2\u01bc\u01b3\3\2"+ + "\2\2\u01bc\u01bd\3\2\2\2\u01bd\u01bf\3\2\2\2\u01be\u01c0\t\r\2\2\u01bf"+ + "\u01be\3\2\2\2\u01bf\u01c0\3\2\2\2\u01c0\u0089\3\2\2\2\u01c1\u01c9\7$"+ + "\2\2\u01c2\u01c3\7^\2\2\u01c3\u01c8\7$\2\2\u01c4\u01c5\7^\2\2\u01c5\u01c8"+ + "\7^\2\2\u01c6\u01c8\n\16\2\2\u01c7\u01c2\3\2\2\2\u01c7\u01c4\3\2\2\2\u01c7"+ + "\u01c6\3\2\2\2\u01c8\u01cb\3\2\2\2\u01c9\u01ca\3\2\2\2\u01c9\u01c7\3\2"+ + "\2\2\u01ca\u01cc\3\2\2\2\u01cb\u01c9\3\2\2\2\u01cc\u01cd\7$\2\2\u01cd"+ + "\u01ce\bE\4\2\u01ce\u008b\3\2\2\2\u01cf\u01d0\7)\2\2\u01d0\u01d1\13\2"+ + "\2\2\u01d1\u01d2\7)\2\2\u01d2\u01d3\bF\5\2\u01d3\u008d\3\2\2\2\u01d4\u01d5"+ + "\7v\2\2\u01d5\u01d6\7t\2\2\u01d6\u01d7\7w\2\2\u01d7\u01d8\7g\2\2\u01d8"+ + "\u008f\3\2\2\2\u01d9\u01da\7h\2\2\u01da\u01db\7c\2\2\u01db\u01dc\7n\2"+ + 
"\2\u01dc\u01dd\7u\2\2\u01dd\u01de\7g\2\2\u01de\u0091\3\2\2\2\u01df\u01e0"+ + "\7p\2\2\u01e0\u01e1\7w\2\2\u01e1\u01e2\7n\2\2\u01e2\u01e3\7n\2\2\u01e3"+ + "\u0093\3\2\2\2\u01e4\u01e6\5\u0098L\2\u01e5\u01e7\5\u0096K\2\u01e6\u01e5"+ + "\3\2\2\2\u01e6\u01e7\3\2\2\2\u01e7\u01e8\3\2\2\2\u01e8\u01e9\6J\2\2\u01e9"+ + "\u01ea\bJ\6\2\u01ea\u0095\3\2\2\2\u01eb\u01ed\7\"\2\2\u01ec\u01eb\3\2"+ + "\2\2\u01ed\u01f0\3\2\2\2\u01ee\u01ec\3\2\2\2\u01ee\u01ef\3\2\2\2\u01ef"+ + "\u01f1\3\2\2\2\u01f0\u01ee\3\2\2\2\u01f1\u01f5\7>\2\2\u01f2\u01f4\7\""+ + "\2\2\u01f3\u01f2\3\2\2\2\u01f4\u01f7\3\2\2\2\u01f5\u01f3\3\2\2\2\u01f5"+ + "\u01f6\3\2\2\2\u01f6\u01f8\3\2\2\2\u01f7\u01f5\3\2\2\2\u01f8\u01fa\5\u0098"+ + "L\2\u01f9\u01fb\5\u0096K\2\u01fa\u01f9\3\2\2\2\u01fa\u01fb\3\2\2\2\u01fb"+ + "\u01ff\3\2\2\2\u01fc\u01fe\7\"\2\2\u01fd\u01fc\3\2\2\2\u01fe\u0201\3\2"+ + "\2\2\u01ff\u01fd\3\2\2\2\u01ff\u0200\3\2\2\2\u0200\u0215\3\2\2\2\u0201"+ + "\u01ff\3\2\2\2\u0202\u0206\5\26\13\2\u0203\u0205\7\"\2\2\u0204\u0203\3"+ + "\2\2\2\u0205\u0208\3\2\2\2\u0206\u0204\3\2\2\2\u0206\u0207\3\2\2\2\u0207"+ + "\u0209\3\2\2\2\u0208\u0206\3\2\2\2\u0209\u020b\5\u0098L\2\u020a\u020c"+ + "\5\u0096K\2\u020b\u020a\3\2\2\2\u020b\u020c\3\2\2\2\u020c\u0210\3\2\2"+ + "\2\u020d\u020f\7\"\2\2\u020e\u020d\3\2\2\2\u020f\u0212\3\2\2\2\u0210\u020e"+ + "\3\2\2\2\u0210\u0211\3\2\2\2\u0211\u0214\3\2\2\2\u0212\u0210\3\2\2\2\u0213"+ + "\u0202\3\2\2\2\u0214\u0217\3\2\2\2\u0215\u0213\3\2\2\2\u0215\u0216\3\2"+ + "\2\2\u0216\u0218\3\2\2\2\u0217\u0215\3\2\2\2\u0218\u0219\7@\2\2\u0219"+ + "\u0097\3\2\2\2\u021a\u021e\t\17\2\2\u021b\u021d\t\20\2\2\u021c\u021b\3"+ + "\2\2\2\u021d\u0220\3\2\2\2\u021e\u021c\3\2\2\2\u021e\u021f\3\2\2\2\u021f"+ + "\u0099\3\2\2\2\u0220\u021e\3\2\2\2\u0221\u022a\7\62\2\2\u0222\u0226\t"+ + "\b\2\2\u0223\u0225\t\t\2\2\u0224\u0223\3\2\2\2\u0225\u0228\3\2\2\2\u0226"+ + "\u0224\3\2\2\2\u0226\u0227\3\2\2\2\u0227\u022a\3\2\2\2\u0228\u0226\3\2"+ + "\2\2\u0229\u0221\3\2\2\2\u0229\u0222\3\2\2\2\u022a\u022b\3\2\2\2\u022b"+ + 
"\u022c\bM\7\2\u022c\u009b\3\2\2\2\u022d\u0231\t\17\2\2\u022e\u0230\t\20"+ + "\2\2\u022f\u022e\3\2\2\2\u0230\u0233\3\2\2\2\u0231\u022f\3\2\2\2\u0231"+ + "\u0232\3\2\2\2\u0232\u0234\3\2\2\2\u0233\u0231\3\2\2\2\u0234\u0235\bN"+ + "\7\2\u0235\u009d\3\2\2\2%\2\3\u00a1\u00ab\u00b5\u00ba\u0186\u0189\u0190"+ + "\u0193\u019a\u019d\u01a0\u01a7\u01aa\u01b0\u01b5\u01ba\u01bc\u01bf\u01c7"+ + "\u01c9\u01e6\u01ee\u01f5\u01fa\u01ff\u0206\u020b\u0210\u0215\u021e\u0226"+ + "\u0229\u0231\b\b\2\2\4\3\2\3E\2\3F\3\3J\4\4\2\2"; + public static final ATN _ATN = + new ATNDeserializer().deserialize(_serializedATN.toCharArray()); + static { + _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()]; + for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) { + _decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i); + } + } +} diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java new file mode 100644 index 00000000000..13f61acb495 --- /dev/null +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java @@ -0,0 +1,2884 @@ +// ANTLR GENERATED CODE: DO NOT EDIT +package org.elasticsearch.plan.a; +import org.antlr.v4.runtime.atn.*; +import org.antlr.v4.runtime.dfa.DFA; +import org.antlr.v4.runtime.*; +import org.antlr.v4.runtime.misc.*; +import org.antlr.v4.runtime.tree.*; +import java.util.List; +import java.util.Iterator; +import java.util.ArrayList; + +@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) +class PlanAParser extends Parser { + static { RuntimeMetaData.checkVersion("4.5.1", RuntimeMetaData.VERSION); } + + protected static final DFA[] _decisionToDFA; + protected static final PredictionContextCache _sharedContextCache = + new PredictionContextCache(); + public static final int + WS=1, COMMENT=2, LBRACK=3, RBRACK=4, LBRACE=5, RBRACE=6, LP=7, RP=8, DOT=9, + COMMA=10, SEMICOLON=11, IF=12, ELSE=13, WHILE=14, DO=15, FOR=16, CONTINUE=17, + 
BREAK=18, RETURN=19, NEW=20, TRY=21, CATCH=22, THROW=23, BOOLNOT=24, BWNOT=25, + MUL=26, DIV=27, REM=28, ADD=29, SUB=30, LSH=31, RSH=32, USH=33, LT=34, + LTE=35, GT=36, GTE=37, EQ=38, EQR=39, NE=40, NER=41, BWAND=42, BWXOR=43, + BWOR=44, BOOLAND=45, BOOLOR=46, COND=47, COLON=48, INCR=49, DECR=50, ASSIGN=51, + AADD=52, ASUB=53, AMUL=54, ADIV=55, AREM=56, AAND=57, AXOR=58, AOR=59, + ALSH=60, ARSH=61, AUSH=62, ACAT=63, OCTAL=64, HEX=65, INTEGER=66, DECIMAL=67, + STRING=68, CHAR=69, TRUE=70, FALSE=71, NULL=72, TYPE=73, ID=74, EXTINTEGER=75, + EXTID=76; + public static final int + RULE_source = 0, RULE_statement = 1, RULE_block = 2, RULE_empty = 3, RULE_initializer = 4, + RULE_afterthought = 5, RULE_declaration = 6, RULE_decltype = 7, RULE_declvar = 8, + RULE_expression = 9, RULE_extstart = 10, RULE_extprec = 11, RULE_extcast = 12, + RULE_extbrace = 13, RULE_extdot = 14, RULE_exttype = 15, RULE_extcall = 16, + RULE_extvar = 17, RULE_extfield = 18, RULE_extnew = 19, RULE_extstring = 20, + RULE_arguments = 21, RULE_increment = 22; + public static final String[] ruleNames = { + "source", "statement", "block", "empty", "initializer", "afterthought", + "declaration", "decltype", "declvar", "expression", "extstart", "extprec", + "extcast", "extbrace", "extdot", "exttype", "extcall", "extvar", "extfield", + "extnew", "extstring", "arguments", "increment" + }; + + private static final String[] _LITERAL_NAMES = { + null, null, null, "'{'", "'}'", "'['", "']'", "'('", "')'", "'.'", "','", + "';'", "'if'", "'else'", "'while'", "'do'", "'for'", "'continue'", "'break'", + "'return'", "'new'", "'try'", "'catch'", "'throw'", "'!'", "'~'", "'*'", + "'/'", "'%'", "'+'", "'-'", "'<<'", "'>>'", "'>>>'", "'<'", "'<='", "'>'", + "'>='", "'=='", "'==='", "'!='", "'!=='", "'&'", "'^'", "'|'", "'&&'", + "'||'", "'?'", "':'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", "'/='", + "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", "'..='", null, + null, null, null, null, null, 
"'true'", "'false'", "'null'" + }; + private static final String[] _SYMBOLIC_NAMES = { + null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", + "DOT", "COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE", + "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "BOOLNOT", "BWNOT", + "MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", + "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "BWXOR", "BWOR", "BOOLAND", + "BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", + "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "ACAT", + "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "CHAR", "TRUE", "FALSE", + "NULL", "TYPE", "ID", "EXTINTEGER", "EXTID" + }; + public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); + + /** + * @deprecated Use {@link #VOCABULARY} instead. + */ + @Deprecated + public static final String[] tokenNames; + static { + tokenNames = new String[_SYMBOLIC_NAMES.length]; + for (int i = 0; i < tokenNames.length; i++) { + tokenNames[i] = VOCABULARY.getLiteralName(i); + if (tokenNames[i] == null) { + tokenNames[i] = VOCABULARY.getSymbolicName(i); + } + + if (tokenNames[i] == null) { + tokenNames[i] = ""; + } + } + } + + @Override + @Deprecated + public String[] getTokenNames() { + return tokenNames; + } + + @Override + + public Vocabulary getVocabulary() { + return VOCABULARY; + } + + @Override + public String getGrammarFileName() { return "PlanAParser.g4"; } + + @Override + public String[] getRuleNames() { return ruleNames; } + + @Override + public String getSerializedATN() { return _serializedATN; } + + @Override + public ATN getATN() { return _ATN; } + + public PlanAParser(TokenStream input) { + super(input); + _interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache); + } + public static class SourceContext extends ParserRuleContext { + public TerminalNode EOF() { return getToken(PlanAParser.EOF, 0); } + 
public List statement() { + return getRuleContexts(StatementContext.class); + } + public StatementContext statement(int i) { + return getRuleContext(StatementContext.class,i); + } + public SourceContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_source; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitSource(this); + else return visitor.visitChildren(this); + } + } + + public final SourceContext source() throws RecognitionException { + SourceContext _localctx = new SourceContext(_ctx, getState()); + enterRule(_localctx, 0, RULE_source); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(47); + _errHandler.sync(this); + _la = _input.LA(1); + do { + { + { + setState(46); + statement(); + } + } + setState(49); + _errHandler.sync(this); + _la = _input.LA(1); + } while ( (((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0) ); + setState(51); + match(EOF); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class StatementContext extends ParserRuleContext { + public StatementContext(ParserRuleContext parent, int invokingState) { + super(parent, 
invokingState); + } + @Override public int getRuleIndex() { return RULE_statement; } + + public StatementContext() { } + public void copyFrom(StatementContext ctx) { + super.copyFrom(ctx); + } + } + public static class DeclContext extends StatementContext { + public DeclarationContext declaration() { + return getRuleContext(DeclarationContext.class,0); + } + public TerminalNode SEMICOLON() { return getToken(PlanAParser.SEMICOLON, 0); } + public DeclContext(StatementContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitDecl(this); + else return visitor.visitChildren(this); + } + } + public static class BreakContext extends StatementContext { + public TerminalNode BREAK() { return getToken(PlanAParser.BREAK, 0); } + public TerminalNode SEMICOLON() { return getToken(PlanAParser.SEMICOLON, 0); } + public BreakContext(StatementContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitBreak(this); + else return visitor.visitChildren(this); + } + } + public static class ThrowContext extends StatementContext { + public TerminalNode THROW() { return getToken(PlanAParser.THROW, 0); } + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class,0); + } + public TerminalNode SEMICOLON() { return getToken(PlanAParser.SEMICOLON, 0); } + public ThrowContext(StatementContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitThrow(this); + else return visitor.visitChildren(this); + } + } + public static class ContinueContext extends StatementContext { + public TerminalNode CONTINUE() { return getToken(PlanAParser.CONTINUE, 0); } + public TerminalNode SEMICOLON() { return 
getToken(PlanAParser.SEMICOLON, 0); } + public ContinueContext(StatementContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitContinue(this); + else return visitor.visitChildren(this); + } + } + public static class ForContext extends StatementContext { + public TerminalNode FOR() { return getToken(PlanAParser.FOR, 0); } + public TerminalNode LP() { return getToken(PlanAParser.LP, 0); } + public List SEMICOLON() { return getTokens(PlanAParser.SEMICOLON); } + public TerminalNode SEMICOLON(int i) { + return getToken(PlanAParser.SEMICOLON, i); + } + public TerminalNode RP() { return getToken(PlanAParser.RP, 0); } + public BlockContext block() { + return getRuleContext(BlockContext.class,0); + } + public EmptyContext empty() { + return getRuleContext(EmptyContext.class,0); + } + public InitializerContext initializer() { + return getRuleContext(InitializerContext.class,0); + } + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class,0); + } + public AfterthoughtContext afterthought() { + return getRuleContext(AfterthoughtContext.class,0); + } + public ForContext(StatementContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitFor(this); + else return visitor.visitChildren(this); + } + } + public static class TryContext extends StatementContext { + public TerminalNode TRY() { return getToken(PlanAParser.TRY, 0); } + public List block() { + return getRuleContexts(BlockContext.class); + } + public BlockContext block(int i) { + return getRuleContext(BlockContext.class,i); + } + public List CATCH() { return getTokens(PlanAParser.CATCH); } + public TerminalNode CATCH(int i) { + return getToken(PlanAParser.CATCH, i); + } + public List LP() { return getTokens(PlanAParser.LP); } + public TerminalNode 
LP(int i) { + return getToken(PlanAParser.LP, i); + } + public List RP() { return getTokens(PlanAParser.RP); } + public TerminalNode RP(int i) { + return getToken(PlanAParser.RP, i); + } + public List TYPE() { return getTokens(PlanAParser.TYPE); } + public TerminalNode TYPE(int i) { + return getToken(PlanAParser.TYPE, i); + } + public List ID() { return getTokens(PlanAParser.ID); } + public TerminalNode ID(int i) { + return getToken(PlanAParser.ID, i); + } + public TryContext(StatementContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitTry(this); + else return visitor.visitChildren(this); + } + } + public static class ExprContext extends StatementContext { + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class,0); + } + public TerminalNode SEMICOLON() { return getToken(PlanAParser.SEMICOLON, 0); } + public ExprContext(StatementContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExpr(this); + else return visitor.visitChildren(this); + } + } + public static class DoContext extends StatementContext { + public TerminalNode DO() { return getToken(PlanAParser.DO, 0); } + public BlockContext block() { + return getRuleContext(BlockContext.class,0); + } + public TerminalNode WHILE() { return getToken(PlanAParser.WHILE, 0); } + public TerminalNode LP() { return getToken(PlanAParser.LP, 0); } + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class,0); + } + public TerminalNode RP() { return getToken(PlanAParser.RP, 0); } + public TerminalNode SEMICOLON() { return getToken(PlanAParser.SEMICOLON, 0); } + public DoContext(StatementContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof 
PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitDo(this); + else return visitor.visitChildren(this); + } + } + public static class WhileContext extends StatementContext { + public TerminalNode WHILE() { return getToken(PlanAParser.WHILE, 0); } + public TerminalNode LP() { return getToken(PlanAParser.LP, 0); } + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class,0); + } + public TerminalNode RP() { return getToken(PlanAParser.RP, 0); } + public BlockContext block() { + return getRuleContext(BlockContext.class,0); + } + public EmptyContext empty() { + return getRuleContext(EmptyContext.class,0); + } + public WhileContext(StatementContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitWhile(this); + else return visitor.visitChildren(this); + } + } + public static class IfContext extends StatementContext { + public TerminalNode IF() { return getToken(PlanAParser.IF, 0); } + public TerminalNode LP() { return getToken(PlanAParser.LP, 0); } + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class,0); + } + public TerminalNode RP() { return getToken(PlanAParser.RP, 0); } + public List block() { + return getRuleContexts(BlockContext.class); + } + public BlockContext block(int i) { + return getRuleContext(BlockContext.class,i); + } + public TerminalNode ELSE() { return getToken(PlanAParser.ELSE, 0); } + public IfContext(StatementContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitIf(this); + else return visitor.visitChildren(this); + } + } + public static class ReturnContext extends StatementContext { + public TerminalNode RETURN() { return getToken(PlanAParser.RETURN, 0); } + public ExpressionContext expression() { + return 
getRuleContext(ExpressionContext.class,0); + } + public TerminalNode SEMICOLON() { return getToken(PlanAParser.SEMICOLON, 0); } + public ReturnContext(StatementContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitReturn(this); + else return visitor.visitChildren(this); + } + } + + public final StatementContext statement() throws RecognitionException { + StatementContext _localctx = new StatementContext(_ctx, getState()); + enterRule(_localctx, 2, RULE_statement); + int _la; + try { + int _alt; + setState(136); + switch ( getInterpreter().adaptivePredict(_input,15,_ctx) ) { + case 1: + _localctx = new IfContext(_localctx); + enterOuterAlt(_localctx, 1); + { + setState(53); + match(IF); + setState(54); + match(LP); + setState(55); + expression(0); + setState(56); + match(RP); + setState(57); + block(); + setState(60); + switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) { + case 1: + { + setState(58); + match(ELSE); + setState(59); + block(); + } + break; + } + } + break; + case 2: + _localctx = new WhileContext(_localctx); + enterOuterAlt(_localctx, 2); + { + setState(62); + match(WHILE); + setState(63); + match(LP); + setState(64); + expression(0); + setState(65); + match(RP); + setState(68); + switch (_input.LA(1)) { + case LBRACK: + case LP: + case IF: + case WHILE: + case DO: + case FOR: + case CONTINUE: + case BREAK: + case RETURN: + case NEW: + case TRY: + case THROW: + case BOOLNOT: + case BWNOT: + case ADD: + case SUB: + case INCR: + case DECR: + case OCTAL: + case HEX: + case INTEGER: + case DECIMAL: + case STRING: + case CHAR: + case TRUE: + case FALSE: + case NULL: + case TYPE: + case ID: + { + setState(66); + block(); + } + break; + case SEMICOLON: + { + setState(67); + empty(); + } + break; + default: + throw new NoViableAltException(this); + } + } + break; + case 3: + _localctx = new DoContext(_localctx); + 
enterOuterAlt(_localctx, 3); + { + setState(70); + match(DO); + setState(71); + block(); + setState(72); + match(WHILE); + setState(73); + match(LP); + setState(74); + expression(0); + setState(75); + match(RP); + setState(77); + _la = _input.LA(1); + if (_la==SEMICOLON) { + { + setState(76); + match(SEMICOLON); + } + } + + } + break; + case 4: + _localctx = new ForContext(_localctx); + enterOuterAlt(_localctx, 4); + { + setState(79); + match(FOR); + setState(80); + match(LP); + setState(82); + _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0)) { + { + setState(81); + initializer(); + } + } + + setState(84); + match(SEMICOLON); + setState(86); + _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0)) { + { + setState(85); + expression(0); + } + } + + setState(88); + match(SEMICOLON); + setState(90); + _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << 
(HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0)) { + { + setState(89); + afterthought(); + } + } + + setState(92); + match(RP); + setState(95); + switch (_input.LA(1)) { + case LBRACK: + case LP: + case IF: + case WHILE: + case DO: + case FOR: + case CONTINUE: + case BREAK: + case RETURN: + case NEW: + case TRY: + case THROW: + case BOOLNOT: + case BWNOT: + case ADD: + case SUB: + case INCR: + case DECR: + case OCTAL: + case HEX: + case INTEGER: + case DECIMAL: + case STRING: + case CHAR: + case TRUE: + case FALSE: + case NULL: + case TYPE: + case ID: + { + setState(93); + block(); + } + break; + case SEMICOLON: + { + setState(94); + empty(); + } + break; + default: + throw new NoViableAltException(this); + } + } + break; + case 5: + _localctx = new DeclContext(_localctx); + enterOuterAlt(_localctx, 5); + { + setState(97); + declaration(); + setState(99); + _la = _input.LA(1); + if (_la==SEMICOLON) { + { + setState(98); + match(SEMICOLON); + } + } + + } + break; + case 6: + _localctx = new ContinueContext(_localctx); + enterOuterAlt(_localctx, 6); + { + setState(101); + match(CONTINUE); + setState(103); + _la = _input.LA(1); + if (_la==SEMICOLON) { + { + setState(102); + match(SEMICOLON); + } + } + + } + break; + case 7: + _localctx = new BreakContext(_localctx); + enterOuterAlt(_localctx, 7); + { + setState(105); + match(BREAK); + setState(107); + _la = _input.LA(1); + if (_la==SEMICOLON) { + { + setState(106); + match(SEMICOLON); + } + } + + } + break; + case 8: + _localctx = new ReturnContext(_localctx); + enterOuterAlt(_localctx, 8); + { + setState(109); + match(RETURN); + setState(110); + expression(0); + setState(112); + _la = _input.LA(1); + if (_la==SEMICOLON) { + { + setState(111); + match(SEMICOLON); + } + } + + } + break; + case 9: + _localctx = new TryContext(_localctx); + 
enterOuterAlt(_localctx, 9); + { + setState(114); + match(TRY); + setState(115); + block(); + setState(123); + _errHandler.sync(this); + _alt = 1; + do { + switch (_alt) { + case 1: + { + { + setState(116); + match(CATCH); + setState(117); + match(LP); + { + setState(118); + match(TYPE); + setState(119); + match(ID); + } + setState(121); + match(RP); + setState(122); + block(); + } + } + break; + default: + throw new NoViableAltException(this); + } + setState(125); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,12,_ctx); + } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); + } + break; + case 10: + _localctx = new ThrowContext(_localctx); + enterOuterAlt(_localctx, 10); + { + setState(127); + match(THROW); + setState(128); + expression(0); + setState(130); + _la = _input.LA(1); + if (_la==SEMICOLON) { + { + setState(129); + match(SEMICOLON); + } + } + + } + break; + case 11: + _localctx = new ExprContext(_localctx); + enterOuterAlt(_localctx, 11); + { + setState(132); + expression(0); + setState(134); + _la = _input.LA(1); + if (_la==SEMICOLON) { + { + setState(133); + match(SEMICOLON); + } + } + + } + break; + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class BlockContext extends ParserRuleContext { + public BlockContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_block; } + + public BlockContext() { } + public void copyFrom(BlockContext ctx) { + super.copyFrom(ctx); + } + } + public static class SingleContext extends BlockContext { + public StatementContext statement() { + return getRuleContext(StatementContext.class,0); + } + public SingleContext(BlockContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( 
visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitSingle(this); + else return visitor.visitChildren(this); + } + } + public static class MultipleContext extends BlockContext { + public TerminalNode LBRACK() { return getToken(PlanAParser.LBRACK, 0); } + public TerminalNode RBRACK() { return getToken(PlanAParser.RBRACK, 0); } + public List statement() { + return getRuleContexts(StatementContext.class); + } + public StatementContext statement(int i) { + return getRuleContext(StatementContext.class,i); + } + public MultipleContext(BlockContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitMultiple(this); + else return visitor.visitChildren(this); + } + } + + public final BlockContext block() throws RecognitionException { + BlockContext _localctx = new BlockContext(_ctx, getState()); + enterRule(_localctx, 4, RULE_block); + int _la; + try { + setState(147); + switch (_input.LA(1)) { + case LBRACK: + _localctx = new MultipleContext(_localctx); + enterOuterAlt(_localctx, 1); + { + setState(138); + match(LBRACK); + setState(142); + _errHandler.sync(this); + _la = _input.LA(1); + while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0)) { + { + { + setState(139); + statement(); + } + } + setState(144); + _errHandler.sync(this); + _la = _input.LA(1); + } + 
setState(145); + match(RBRACK); + } + break; + case LP: + case IF: + case WHILE: + case DO: + case FOR: + case CONTINUE: + case BREAK: + case RETURN: + case NEW: + case TRY: + case THROW: + case BOOLNOT: + case BWNOT: + case ADD: + case SUB: + case INCR: + case DECR: + case OCTAL: + case HEX: + case INTEGER: + case DECIMAL: + case STRING: + case CHAR: + case TRUE: + case FALSE: + case NULL: + case TYPE: + case ID: + _localctx = new SingleContext(_localctx); + enterOuterAlt(_localctx, 2); + { + setState(146); + statement(); + } + break; + default: + throw new NoViableAltException(this); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class EmptyContext extends ParserRuleContext { + public TerminalNode SEMICOLON() { return getToken(PlanAParser.SEMICOLON, 0); } + public EmptyContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_empty; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitEmpty(this); + else return visitor.visitChildren(this); + } + } + + public final EmptyContext empty() throws RecognitionException { + EmptyContext _localctx = new EmptyContext(_ctx, getState()); + enterRule(_localctx, 6, RULE_empty); + try { + enterOuterAlt(_localctx, 1); + { + setState(149); + match(SEMICOLON); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class InitializerContext extends ParserRuleContext { + public DeclarationContext declaration() { + return getRuleContext(DeclarationContext.class,0); + } + public ExpressionContext expression() { + return 
getRuleContext(ExpressionContext.class,0); + } + public InitializerContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_initializer; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitInitializer(this); + else return visitor.visitChildren(this); + } + } + + public final InitializerContext initializer() throws RecognitionException { + InitializerContext _localctx = new InitializerContext(_ctx, getState()); + enterRule(_localctx, 8, RULE_initializer); + try { + setState(153); + switch ( getInterpreter().adaptivePredict(_input,18,_ctx) ) { + case 1: + enterOuterAlt(_localctx, 1); + { + setState(151); + declaration(); + } + break; + case 2: + enterOuterAlt(_localctx, 2); + { + setState(152); + expression(0); + } + break; + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class AfterthoughtContext extends ParserRuleContext { + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class,0); + } + public AfterthoughtContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_afterthought; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitAfterthought(this); + else return visitor.visitChildren(this); + } + } + + public final AfterthoughtContext afterthought() throws RecognitionException { + AfterthoughtContext _localctx = new AfterthoughtContext(_ctx, getState()); + enterRule(_localctx, 10, RULE_afterthought); + try { + enterOuterAlt(_localctx, 1); + { + setState(155); + expression(0); + } + } + catch 
(RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class DeclarationContext extends ParserRuleContext { + public DecltypeContext decltype() { + return getRuleContext(DecltypeContext.class,0); + } + public List declvar() { + return getRuleContexts(DeclvarContext.class); + } + public DeclvarContext declvar(int i) { + return getRuleContext(DeclvarContext.class,i); + } + public List COMMA() { return getTokens(PlanAParser.COMMA); } + public TerminalNode COMMA(int i) { + return getToken(PlanAParser.COMMA, i); + } + public DeclarationContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_declaration; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitDeclaration(this); + else return visitor.visitChildren(this); + } + } + + public final DeclarationContext declaration() throws RecognitionException { + DeclarationContext _localctx = new DeclarationContext(_ctx, getState()); + enterRule(_localctx, 12, RULE_declaration); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(157); + decltype(); + setState(158); + declvar(); + setState(163); + _errHandler.sync(this); + _la = _input.LA(1); + while (_la==COMMA) { + { + { + setState(159); + match(COMMA); + setState(160); + declvar(); + } + } + setState(165); + _errHandler.sync(this); + _la = _input.LA(1); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class DecltypeContext extends ParserRuleContext { + public TerminalNode TYPE() { return getToken(PlanAParser.TYPE, 0); } + public List LBRACE() { return 
getTokens(PlanAParser.LBRACE); } + public TerminalNode LBRACE(int i) { + return getToken(PlanAParser.LBRACE, i); + } + public List RBRACE() { return getTokens(PlanAParser.RBRACE); } + public TerminalNode RBRACE(int i) { + return getToken(PlanAParser.RBRACE, i); + } + public DecltypeContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_decltype; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitDecltype(this); + else return visitor.visitChildren(this); + } + } + + public final DecltypeContext decltype() throws RecognitionException { + DecltypeContext _localctx = new DecltypeContext(_ctx, getState()); + enterRule(_localctx, 14, RULE_decltype); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(166); + match(TYPE); + setState(171); + _errHandler.sync(this); + _la = _input.LA(1); + while (_la==LBRACE) { + { + { + setState(167); + match(LBRACE); + setState(168); + match(RBRACE); + } + } + setState(173); + _errHandler.sync(this); + _la = _input.LA(1); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class DeclvarContext extends ParserRuleContext { + public TerminalNode ID() { return getToken(PlanAParser.ID, 0); } + public TerminalNode ASSIGN() { return getToken(PlanAParser.ASSIGN, 0); } + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class,0); + } + public DeclvarContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_declvar; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return 
((PlanAParserVisitor)visitor).visitDeclvar(this); + else return visitor.visitChildren(this); + } + } + + public final DeclvarContext declvar() throws RecognitionException { + DeclvarContext _localctx = new DeclvarContext(_ctx, getState()); + enterRule(_localctx, 16, RULE_declvar); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(174); + match(ID); + setState(177); + _la = _input.LA(1); + if (_la==ASSIGN) { + { + setState(175); + match(ASSIGN); + setState(176); + expression(0); + } + } + + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class ExpressionContext extends ParserRuleContext { + public ExpressionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_expression; } + + public ExpressionContext() { } + public void copyFrom(ExpressionContext ctx) { + super.copyFrom(ctx); + } + } + public static class CompContext extends ExpressionContext { + public List expression() { + return getRuleContexts(ExpressionContext.class); + } + public ExpressionContext expression(int i) { + return getRuleContext(ExpressionContext.class,i); + } + public TerminalNode LT() { return getToken(PlanAParser.LT, 0); } + public TerminalNode LTE() { return getToken(PlanAParser.LTE, 0); } + public TerminalNode GT() { return getToken(PlanAParser.GT, 0); } + public TerminalNode GTE() { return getToken(PlanAParser.GTE, 0); } + public TerminalNode EQ() { return getToken(PlanAParser.EQ, 0); } + public TerminalNode EQR() { return getToken(PlanAParser.EQR, 0); } + public TerminalNode NE() { return getToken(PlanAParser.NE, 0); } + public TerminalNode NER() { return getToken(PlanAParser.NER, 0); } + public CompContext(ExpressionContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor 
instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitComp(this); + else return visitor.visitChildren(this); + } + } + public static class BoolContext extends ExpressionContext { + public List expression() { + return getRuleContexts(ExpressionContext.class); + } + public ExpressionContext expression(int i) { + return getRuleContext(ExpressionContext.class,i); + } + public TerminalNode BOOLAND() { return getToken(PlanAParser.BOOLAND, 0); } + public TerminalNode BOOLOR() { return getToken(PlanAParser.BOOLOR, 0); } + public BoolContext(ExpressionContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitBool(this); + else return visitor.visitChildren(this); + } + } + public static class ConditionalContext extends ExpressionContext { + public List expression() { + return getRuleContexts(ExpressionContext.class); + } + public ExpressionContext expression(int i) { + return getRuleContext(ExpressionContext.class,i); + } + public TerminalNode COND() { return getToken(PlanAParser.COND, 0); } + public TerminalNode COLON() { return getToken(PlanAParser.COLON, 0); } + public ConditionalContext(ExpressionContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitConditional(this); + else return visitor.visitChildren(this); + } + } + public static class AssignmentContext extends ExpressionContext { + public ExtstartContext extstart() { + return getRuleContext(ExtstartContext.class,0); + } + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class,0); + } + public TerminalNode ASSIGN() { return getToken(PlanAParser.ASSIGN, 0); } + public TerminalNode AADD() { return getToken(PlanAParser.AADD, 0); } + public TerminalNode ASUB() { return getToken(PlanAParser.ASUB, 0); } + public TerminalNode 
AMUL() { return getToken(PlanAParser.AMUL, 0); } + public TerminalNode ADIV() { return getToken(PlanAParser.ADIV, 0); } + public TerminalNode AREM() { return getToken(PlanAParser.AREM, 0); } + public TerminalNode AAND() { return getToken(PlanAParser.AAND, 0); } + public TerminalNode AXOR() { return getToken(PlanAParser.AXOR, 0); } + public TerminalNode AOR() { return getToken(PlanAParser.AOR, 0); } + public TerminalNode ALSH() { return getToken(PlanAParser.ALSH, 0); } + public TerminalNode ARSH() { return getToken(PlanAParser.ARSH, 0); } + public TerminalNode AUSH() { return getToken(PlanAParser.AUSH, 0); } + public AssignmentContext(ExpressionContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitAssignment(this); + else return visitor.visitChildren(this); + } + } + public static class FalseContext extends ExpressionContext { + public TerminalNode FALSE() { return getToken(PlanAParser.FALSE, 0); } + public FalseContext(ExpressionContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitFalse(this); + else return visitor.visitChildren(this); + } + } + public static class NumericContext extends ExpressionContext { + public TerminalNode OCTAL() { return getToken(PlanAParser.OCTAL, 0); } + public TerminalNode HEX() { return getToken(PlanAParser.HEX, 0); } + public TerminalNode INTEGER() { return getToken(PlanAParser.INTEGER, 0); } + public TerminalNode DECIMAL() { return getToken(PlanAParser.DECIMAL, 0); } + public NumericContext(ExpressionContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitNumeric(this); + else return visitor.visitChildren(this); + } + } + public static class UnaryContext extends 
ExpressionContext { + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class,0); + } + public TerminalNode BOOLNOT() { return getToken(PlanAParser.BOOLNOT, 0); } + public TerminalNode BWNOT() { return getToken(PlanAParser.BWNOT, 0); } + public TerminalNode ADD() { return getToken(PlanAParser.ADD, 0); } + public TerminalNode SUB() { return getToken(PlanAParser.SUB, 0); } + public UnaryContext(ExpressionContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitUnary(this); + else return visitor.visitChildren(this); + } + } + public static class PrecedenceContext extends ExpressionContext { + public TerminalNode LP() { return getToken(PlanAParser.LP, 0); } + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class,0); + } + public TerminalNode RP() { return getToken(PlanAParser.RP, 0); } + public PrecedenceContext(ExpressionContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitPrecedence(this); + else return visitor.visitChildren(this); + } + } + public static class PreincContext extends ExpressionContext { + public IncrementContext increment() { + return getRuleContext(IncrementContext.class,0); + } + public ExtstartContext extstart() { + return getRuleContext(ExtstartContext.class,0); + } + public PreincContext(ExpressionContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitPreinc(this); + else return visitor.visitChildren(this); + } + } + public static class PostincContext extends ExpressionContext { + public ExtstartContext extstart() { + return getRuleContext(ExtstartContext.class,0); + } + public IncrementContext increment() { + return 
getRuleContext(IncrementContext.class,0); + } + public PostincContext(ExpressionContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitPostinc(this); + else return visitor.visitChildren(this); + } + } + public static class CastContext extends ExpressionContext { + public TerminalNode LP() { return getToken(PlanAParser.LP, 0); } + public DecltypeContext decltype() { + return getRuleContext(DecltypeContext.class,0); + } + public TerminalNode RP() { return getToken(PlanAParser.RP, 0); } + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class,0); + } + public CastContext(ExpressionContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitCast(this); + else return visitor.visitChildren(this); + } + } + public static class ExternalContext extends ExpressionContext { + public ExtstartContext extstart() { + return getRuleContext(ExtstartContext.class,0); + } + public ExternalContext(ExpressionContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExternal(this); + else return visitor.visitChildren(this); + } + } + public static class NullContext extends ExpressionContext { + public TerminalNode NULL() { return getToken(PlanAParser.NULL, 0); } + public NullContext(ExpressionContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitNull(this); + else return visitor.visitChildren(this); + } + } + public static class BinaryContext extends ExpressionContext { + public List expression() { + return getRuleContexts(ExpressionContext.class); + } + public 
ExpressionContext expression(int i) { + return getRuleContext(ExpressionContext.class,i); + } + public TerminalNode MUL() { return getToken(PlanAParser.MUL, 0); } + public TerminalNode DIV() { return getToken(PlanAParser.DIV, 0); } + public TerminalNode REM() { return getToken(PlanAParser.REM, 0); } + public TerminalNode ADD() { return getToken(PlanAParser.ADD, 0); } + public TerminalNode SUB() { return getToken(PlanAParser.SUB, 0); } + public TerminalNode LSH() { return getToken(PlanAParser.LSH, 0); } + public TerminalNode RSH() { return getToken(PlanAParser.RSH, 0); } + public TerminalNode USH() { return getToken(PlanAParser.USH, 0); } + public TerminalNode BWAND() { return getToken(PlanAParser.BWAND, 0); } + public TerminalNode BWXOR() { return getToken(PlanAParser.BWXOR, 0); } + public TerminalNode BWOR() { return getToken(PlanAParser.BWOR, 0); } + public BinaryContext(ExpressionContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitBinary(this); + else return visitor.visitChildren(this); + } + } + public static class CharContext extends ExpressionContext { + public TerminalNode CHAR() { return getToken(PlanAParser.CHAR, 0); } + public CharContext(ExpressionContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitChar(this); + else return visitor.visitChildren(this); + } + } + public static class TrueContext extends ExpressionContext { + public TerminalNode TRUE() { return getToken(PlanAParser.TRUE, 0); } + public TrueContext(ExpressionContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitTrue(this); + else return visitor.visitChildren(this); + } + } + + public final ExpressionContext expression() 
throws RecognitionException { + return expression(0); + } + + private ExpressionContext expression(int _p) throws RecognitionException { + ParserRuleContext _parentctx = _ctx; + int _parentState = getState(); + ExpressionContext _localctx = new ExpressionContext(_ctx, _parentState); + ExpressionContext _prevctx = _localctx; + int _startState = 18; + enterRecursionRule(_localctx, 18, RULE_expression, _p); + int _la; + try { + int _alt; + enterOuterAlt(_localctx, 1); + { + setState(207); + switch ( getInterpreter().adaptivePredict(_input,22,_ctx) ) { + case 1: + { + _localctx = new UnaryContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + + setState(180); + _la = _input.LA(1); + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB))) != 0)) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(181); + expression(14); + } + break; + case 2: + { + _localctx = new CastContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(182); + match(LP); + setState(183); + decltype(); + setState(184); + match(RP); + setState(185); + expression(13); + } + break; + case 3: + { + _localctx = new AssignmentContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(187); + extstart(); + setState(188); + _la = _input.LA(1); + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ASSIGN) | (1L << AADD) | (1L << ASUB) | (1L << AMUL) | (1L << ADIV) | (1L << AREM) | (1L << AAND) | (1L << AXOR) | (1L << AOR) | (1L << ALSH) | (1L << ARSH) | (1L << AUSH))) != 0)) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(189); + expression(1); + } + break; + case 4: + { + _localctx = new PrecedenceContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(191); + match(LP); + setState(192); + expression(0); + setState(193); + match(RP); + } + break; + case 5: + { + _localctx = new NumericContext(_localctx); + _ctx = _localctx; + 
_prevctx = _localctx; + setState(195); + _la = _input.LA(1); + if ( !(((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)))) != 0)) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } + } + break; + case 6: + { + _localctx = new CharContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(196); + match(CHAR); + } + break; + case 7: + { + _localctx = new TrueContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(197); + match(TRUE); + } + break; + case 8: + { + _localctx = new FalseContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(198); + match(FALSE); + } + break; + case 9: + { + _localctx = new NullContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(199); + match(NULL); + } + break; + case 10: + { + _localctx = new PostincContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(200); + extstart(); + setState(201); + increment(); + } + break; + case 11: + { + _localctx = new PreincContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(203); + increment(); + setState(204); + extstart(); + } + break; + case 12: + { + _localctx = new ExternalContext(_localctx); + _ctx = _localctx; + _prevctx = _localctx; + setState(206); + extstart(); + } + break; + } + _ctx.stop = _input.LT(-1); + setState(247); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,24,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + if ( _parseListeners!=null ) triggerExitRuleEvent(); + _prevctx = _localctx; + { + setState(245); + switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { + case 1: + { + _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_expression); + setState(209); + if (!(precpred(_ctx, 
12))) throw new FailedPredicateException(this, "precpred(_ctx, 12)"); + setState(210); + _la = _input.LA(1); + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << MUL) | (1L << DIV) | (1L << REM))) != 0)) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(211); + expression(13); + } + break; + case 2: + { + _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_expression); + setState(212); + if (!(precpred(_ctx, 11))) throw new FailedPredicateException(this, "precpred(_ctx, 11)"); + setState(213); + _la = _input.LA(1); + if ( !(_la==ADD || _la==SUB) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(214); + expression(12); + } + break; + case 3: + { + _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_expression); + setState(215); + if (!(precpred(_ctx, 10))) throw new FailedPredicateException(this, "precpred(_ctx, 10)"); + setState(216); + _la = _input.LA(1); + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LSH) | (1L << RSH) | (1L << USH))) != 0)) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(217); + expression(11); + } + break; + case 4: + { + _localctx = new CompContext(new ExpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_expression); + setState(218); + if (!(precpred(_ctx, 9))) throw new FailedPredicateException(this, "precpred(_ctx, 9)"); + setState(219); + _la = _input.LA(1); + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LT) | (1L << LTE) | (1L << GT) | (1L << GTE))) != 0)) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(220); + expression(10); + } + break; + case 5: + { + _localctx = new CompContext(new ExpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, 
RULE_expression); + setState(221); + if (!(precpred(_ctx, 8))) throw new FailedPredicateException(this, "precpred(_ctx, 8)"); + setState(222); + _la = _input.LA(1); + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << EQ) | (1L << EQR) | (1L << NE) | (1L << NER))) != 0)) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(223); + expression(9); + } + break; + case 6: + { + _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_expression); + setState(224); + if (!(precpred(_ctx, 7))) throw new FailedPredicateException(this, "precpred(_ctx, 7)"); + setState(225); + match(BWAND); + setState(226); + expression(8); + } + break; + case 7: + { + _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_expression); + setState(227); + if (!(precpred(_ctx, 6))) throw new FailedPredicateException(this, "precpred(_ctx, 6)"); + setState(228); + match(BWXOR); + setState(229); + expression(7); + } + break; + case 8: + { + _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_expression); + setState(230); + if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); + setState(231); + match(BWOR); + setState(232); + expression(6); + } + break; + case 9: + { + _localctx = new BoolContext(new ExpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_expression); + setState(233); + if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); + setState(234); + match(BOOLAND); + setState(235); + expression(5); + } + break; + case 10: + { + _localctx = new BoolContext(new ExpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_expression); + setState(236); + if (!(precpred(_ctx, 
3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); + setState(237); + match(BOOLOR); + setState(238); + expression(4); + } + break; + case 11: + { + _localctx = new ConditionalContext(new ExpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_expression); + setState(239); + if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); + setState(240); + match(COND); + setState(241); + expression(0); + setState(242); + match(COLON); + setState(243); + expression(2); + } + break; + } + } + } + setState(249); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,24,_ctx); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + unrollRecursionContexts(_parentctx); + } + return _localctx; + } + + public static class ExtstartContext extends ParserRuleContext { + public ExtprecContext extprec() { + return getRuleContext(ExtprecContext.class,0); + } + public ExtcastContext extcast() { + return getRuleContext(ExtcastContext.class,0); + } + public ExttypeContext exttype() { + return getRuleContext(ExttypeContext.class,0); + } + public ExtvarContext extvar() { + return getRuleContext(ExtvarContext.class,0); + } + public ExtnewContext extnew() { + return getRuleContext(ExtnewContext.class,0); + } + public ExtstringContext extstring() { + return getRuleContext(ExtstringContext.class,0); + } + public ExtstartContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_extstart; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExtstart(this); + else return visitor.visitChildren(this); + } + } + + public final ExtstartContext extstart() throws RecognitionException { + ExtstartContext 
_localctx = new ExtstartContext(_ctx, getState()); + enterRule(_localctx, 20, RULE_extstart); + try { + setState(256); + switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { + case 1: + enterOuterAlt(_localctx, 1); + { + setState(250); + extprec(); + } + break; + case 2: + enterOuterAlt(_localctx, 2); + { + setState(251); + extcast(); + } + break; + case 3: + enterOuterAlt(_localctx, 3); + { + setState(252); + exttype(); + } + break; + case 4: + enterOuterAlt(_localctx, 4); + { + setState(253); + extvar(); + } + break; + case 5: + enterOuterAlt(_localctx, 5); + { + setState(254); + extnew(); + } + break; + case 6: + enterOuterAlt(_localctx, 6); + { + setState(255); + extstring(); + } + break; + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class ExtprecContext extends ParserRuleContext { + public TerminalNode LP() { return getToken(PlanAParser.LP, 0); } + public TerminalNode RP() { return getToken(PlanAParser.RP, 0); } + public ExtprecContext extprec() { + return getRuleContext(ExtprecContext.class,0); + } + public ExtcastContext extcast() { + return getRuleContext(ExtcastContext.class,0); + } + public ExttypeContext exttype() { + return getRuleContext(ExttypeContext.class,0); + } + public ExtvarContext extvar() { + return getRuleContext(ExtvarContext.class,0); + } + public ExtnewContext extnew() { + return getRuleContext(ExtnewContext.class,0); + } + public ExtstringContext extstring() { + return getRuleContext(ExtstringContext.class,0); + } + public ExtdotContext extdot() { + return getRuleContext(ExtdotContext.class,0); + } + public ExtbraceContext extbrace() { + return getRuleContext(ExtbraceContext.class,0); + } + public ExtprecContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_extprec; } + 
@Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExtprec(this); + else return visitor.visitChildren(this); + } + } + + public final ExtprecContext extprec() throws RecognitionException { + ExtprecContext _localctx = new ExtprecContext(_ctx, getState()); + enterRule(_localctx, 22, RULE_extprec); + try { + enterOuterAlt(_localctx, 1); + { + setState(258); + match(LP); + setState(265); + switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) { + case 1: + { + setState(259); + extprec(); + } + break; + case 2: + { + setState(260); + extcast(); + } + break; + case 3: + { + setState(261); + exttype(); + } + break; + case 4: + { + setState(262); + extvar(); + } + break; + case 5: + { + setState(263); + extnew(); + } + break; + case 6: + { + setState(264); + extstring(); + } + break; + } + setState(267); + match(RP); + setState(270); + switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { + case 1: + { + setState(268); + extdot(); + } + break; + case 2: + { + setState(269); + extbrace(); + } + break; + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class ExtcastContext extends ParserRuleContext { + public TerminalNode LP() { return getToken(PlanAParser.LP, 0); } + public DecltypeContext decltype() { + return getRuleContext(DecltypeContext.class,0); + } + public TerminalNode RP() { return getToken(PlanAParser.RP, 0); } + public ExtprecContext extprec() { + return getRuleContext(ExtprecContext.class,0); + } + public ExtcastContext extcast() { + return getRuleContext(ExtcastContext.class,0); + } + public ExttypeContext exttype() { + return getRuleContext(ExttypeContext.class,0); + } + public ExtvarContext extvar() { + return getRuleContext(ExtvarContext.class,0); + } + public ExtnewContext extnew() 
{ + return getRuleContext(ExtnewContext.class,0); + } + public ExtstringContext extstring() { + return getRuleContext(ExtstringContext.class,0); + } + public ExtcastContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_extcast; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExtcast(this); + else return visitor.visitChildren(this); + } + } + + public final ExtcastContext extcast() throws RecognitionException { + ExtcastContext _localctx = new ExtcastContext(_ctx, getState()); + enterRule(_localctx, 24, RULE_extcast); + try { + enterOuterAlt(_localctx, 1); + { + setState(272); + match(LP); + setState(273); + decltype(); + setState(274); + match(RP); + setState(281); + switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { + case 1: + { + setState(275); + extprec(); + } + break; + case 2: + { + setState(276); + extcast(); + } + break; + case 3: + { + setState(277); + exttype(); + } + break; + case 4: + { + setState(278); + extvar(); + } + break; + case 5: + { + setState(279); + extnew(); + } + break; + case 6: + { + setState(280); + extstring(); + } + break; + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class ExtbraceContext extends ParserRuleContext { + public TerminalNode LBRACE() { return getToken(PlanAParser.LBRACE, 0); } + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class,0); + } + public TerminalNode RBRACE() { return getToken(PlanAParser.RBRACE, 0); } + public ExtdotContext extdot() { + return getRuleContext(ExtdotContext.class,0); + } + public ExtbraceContext extbrace() { + return getRuleContext(ExtbraceContext.class,0); + } + public 
ExtbraceContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_extbrace; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExtbrace(this); + else return visitor.visitChildren(this); + } + } + + public final ExtbraceContext extbrace() throws RecognitionException { + ExtbraceContext _localctx = new ExtbraceContext(_ctx, getState()); + enterRule(_localctx, 26, RULE_extbrace); + try { + enterOuterAlt(_localctx, 1); + { + setState(283); + match(LBRACE); + setState(284); + expression(0); + setState(285); + match(RBRACE); + setState(288); + switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { + case 1: + { + setState(286); + extdot(); + } + break; + case 2: + { + setState(287); + extbrace(); + } + break; + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class ExtdotContext extends ParserRuleContext { + public TerminalNode DOT() { return getToken(PlanAParser.DOT, 0); } + public ExtcallContext extcall() { + return getRuleContext(ExtcallContext.class,0); + } + public ExtfieldContext extfield() { + return getRuleContext(ExtfieldContext.class,0); + } + public ExtdotContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_extdot; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExtdot(this); + else return visitor.visitChildren(this); + } + } + + public final ExtdotContext extdot() throws RecognitionException { + ExtdotContext _localctx = new ExtdotContext(_ctx, getState()); + enterRule(_localctx, 28, RULE_extdot); + try { + 
enterOuterAlt(_localctx, 1); + { + setState(290); + match(DOT); + setState(293); + switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { + case 1: + { + setState(291); + extcall(); + } + break; + case 2: + { + setState(292); + extfield(); + } + break; + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class ExttypeContext extends ParserRuleContext { + public TerminalNode TYPE() { return getToken(PlanAParser.TYPE, 0); } + public ExtdotContext extdot() { + return getRuleContext(ExtdotContext.class,0); + } + public ExttypeContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_exttype; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExttype(this); + else return visitor.visitChildren(this); + } + } + + public final ExttypeContext exttype() throws RecognitionException { + ExttypeContext _localctx = new ExttypeContext(_ctx, getState()); + enterRule(_localctx, 30, RULE_exttype); + try { + enterOuterAlt(_localctx, 1); + { + setState(295); + match(TYPE); + setState(296); + extdot(); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class ExtcallContext extends ParserRuleContext { + public TerminalNode EXTID() { return getToken(PlanAParser.EXTID, 0); } + public ArgumentsContext arguments() { + return getRuleContext(ArgumentsContext.class,0); + } + public ExtdotContext extdot() { + return getRuleContext(ExtdotContext.class,0); + } + public ExtbraceContext extbrace() { + return getRuleContext(ExtbraceContext.class,0); + } + public 
ExtcallContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_extcall; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExtcall(this); + else return visitor.visitChildren(this); + } + } + + public final ExtcallContext extcall() throws RecognitionException { + ExtcallContext _localctx = new ExtcallContext(_ctx, getState()); + enterRule(_localctx, 32, RULE_extcall); + try { + enterOuterAlt(_localctx, 1); + { + setState(298); + match(EXTID); + setState(299); + arguments(); + setState(302); + switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { + case 1: + { + setState(300); + extdot(); + } + break; + case 2: + { + setState(301); + extbrace(); + } + break; + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class ExtvarContext extends ParserRuleContext { + public TerminalNode ID() { return getToken(PlanAParser.ID, 0); } + public ExtdotContext extdot() { + return getRuleContext(ExtdotContext.class,0); + } + public ExtbraceContext extbrace() { + return getRuleContext(ExtbraceContext.class,0); + } + public ExtvarContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_extvar; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExtvar(this); + else return visitor.visitChildren(this); + } + } + + public final ExtvarContext extvar() throws RecognitionException { + ExtvarContext _localctx = new ExtvarContext(_ctx, getState()); + enterRule(_localctx, 34, RULE_extvar); + try { + enterOuterAlt(_localctx, 1); + { + setState(304); + 
match(ID); + setState(307); + switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { + case 1: + { + setState(305); + extdot(); + } + break; + case 2: + { + setState(306); + extbrace(); + } + break; + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class ExtfieldContext extends ParserRuleContext { + public TerminalNode EXTID() { return getToken(PlanAParser.EXTID, 0); } + public TerminalNode EXTINTEGER() { return getToken(PlanAParser.EXTINTEGER, 0); } + public ExtdotContext extdot() { + return getRuleContext(ExtdotContext.class,0); + } + public ExtbraceContext extbrace() { + return getRuleContext(ExtbraceContext.class,0); + } + public ExtfieldContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_extfield; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExtfield(this); + else return visitor.visitChildren(this); + } + } + + public final ExtfieldContext extfield() throws RecognitionException { + ExtfieldContext _localctx = new ExtfieldContext(_ctx, getState()); + enterRule(_localctx, 36, RULE_extfield); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(309); + _la = _input.LA(1); + if ( !(_la==EXTINTEGER || _la==EXTID) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(312); + switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { + case 1: + { + setState(310); + extdot(); + } + break; + case 2: + { + setState(311); + extbrace(); + } + break; + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public 
static class ExtnewContext extends ParserRuleContext { + public TerminalNode NEW() { return getToken(PlanAParser.NEW, 0); } + public TerminalNode TYPE() { return getToken(PlanAParser.TYPE, 0); } + public ArgumentsContext arguments() { + return getRuleContext(ArgumentsContext.class,0); + } + public ExtdotContext extdot() { + return getRuleContext(ExtdotContext.class,0); + } + public ExtbraceContext extbrace() { + return getRuleContext(ExtbraceContext.class,0); + } + public List LBRACE() { return getTokens(PlanAParser.LBRACE); } + public TerminalNode LBRACE(int i) { + return getToken(PlanAParser.LBRACE, i); + } + public List expression() { + return getRuleContexts(ExpressionContext.class); + } + public ExpressionContext expression(int i) { + return getRuleContext(ExpressionContext.class,i); + } + public List RBRACE() { return getTokens(PlanAParser.RBRACE); } + public TerminalNode RBRACE(int i) { + return getToken(PlanAParser.RBRACE, i); + } + public ExtnewContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_extnew; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExtnew(this); + else return visitor.visitChildren(this); + } + } + + public final ExtnewContext extnew() throws RecognitionException { + ExtnewContext _localctx = new ExtnewContext(_ctx, getState()); + enterRule(_localctx, 38, RULE_extnew); + try { + int _alt; + enterOuterAlt(_localctx, 1); + { + setState(314); + match(NEW); + setState(315); + match(TYPE); + setState(332); + switch (_input.LA(1)) { + case LP: + { + { + setState(316); + arguments(); + setState(319); + switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) { + case 1: + { + setState(317); + extdot(); + } + break; + case 2: + { + setState(318); + extbrace(); + } + break; + } + } + } + break; + case LBRACE: + { + { + setState(325); + 
_errHandler.sync(this); + _alt = 1; + do { + switch (_alt) { + case 1: + { + { + setState(321); + match(LBRACE); + setState(322); + expression(0); + setState(323); + match(RBRACE); + } + } + break; + default: + throw new NoViableAltException(this); + } + setState(327); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,35,_ctx); + } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); + setState(330); + switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { + case 1: + { + setState(329); + extdot(); + } + break; + } + } + } + break; + default: + throw new NoViableAltException(this); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class ExtstringContext extends ParserRuleContext { + public TerminalNode STRING() { return getToken(PlanAParser.STRING, 0); } + public ExtdotContext extdot() { + return getRuleContext(ExtdotContext.class,0); + } + public ExtbraceContext extbrace() { + return getRuleContext(ExtbraceContext.class,0); + } + public ExtstringContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_extstring; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitExtstring(this); + else return visitor.visitChildren(this); + } + } + + public final ExtstringContext extstring() throws RecognitionException { + ExtstringContext _localctx = new ExtstringContext(_ctx, getState()); + enterRule(_localctx, 40, RULE_extstring); + try { + enterOuterAlt(_localctx, 1); + { + setState(334); + match(STRING); + setState(337); + switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { + case 1: + { + setState(335); + extdot(); + } + break; + case 2: + { + setState(336); + 
extbrace(); + } + break; + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class ArgumentsContext extends ParserRuleContext { + public TerminalNode LP() { return getToken(PlanAParser.LP, 0); } + public TerminalNode RP() { return getToken(PlanAParser.RP, 0); } + public List expression() { + return getRuleContexts(ExpressionContext.class); + } + public ExpressionContext expression(int i) { + return getRuleContext(ExpressionContext.class,i); + } + public List COMMA() { return getTokens(PlanAParser.COMMA); } + public TerminalNode COMMA(int i) { + return getToken(PlanAParser.COMMA, i); + } + public ArgumentsContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_arguments; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitArguments(this); + else return visitor.visitChildren(this); + } + } + + public final ArgumentsContext arguments() throws RecognitionException { + ArgumentsContext _localctx = new ArgumentsContext(_ctx, getState()); + enterRule(_localctx, 42, RULE_arguments); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + { + setState(339); + match(LP); + setState(348); + _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0)) { + { + setState(340); + expression(0); + 
setState(345); + _errHandler.sync(this); + _la = _input.LA(1); + while (_la==COMMA) { + { + { + setState(341); + match(COMMA); + setState(342); + expression(0); + } + } + setState(347); + _errHandler.sync(this); + _la = _input.LA(1); + } + } + } + + setState(350); + match(RP); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class IncrementContext extends ParserRuleContext { + public TerminalNode INCR() { return getToken(PlanAParser.INCR, 0); } + public TerminalNode DECR() { return getToken(PlanAParser.DECR, 0); } + public IncrementContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_increment; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitIncrement(this); + else return visitor.visitChildren(this); + } + } + + public final IncrementContext increment() throws RecognitionException { + IncrementContext _localctx = new IncrementContext(_ctx, getState()); + enterRule(_localctx, 44, RULE_increment); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(352); + _la = _input.LA(1); + if ( !(_la==INCR || _la==DECR) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { + switch (ruleIndex) { + case 9: + return expression_sempred((ExpressionContext)_localctx, predIndex); + } + return true; + } + private boolean expression_sempred(ExpressionContext _localctx, int predIndex) { + switch (predIndex) { + case 0: + return 
precpred(_ctx, 12); + case 1: + return precpred(_ctx, 11); + case 2: + return precpred(_ctx, 10); + case 3: + return precpred(_ctx, 9); + case 4: + return precpred(_ctx, 8); + case 5: + return precpred(_ctx, 7); + case 6: + return precpred(_ctx, 6); + case 7: + return precpred(_ctx, 5); + case 8: + return precpred(_ctx, 4); + case 9: + return precpred(_ctx, 3); + case 10: + return precpred(_ctx, 2); + } + return true; + } + + public static final String _serializedATN = + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3N\u0165\4\2\t\2\4"+ + "\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+ + "\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ + "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\3\2\6\2\62"+ + "\n\2\r\2\16\2\63\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3?\n\3\3\3\3\3"+ + "\3\3\3\3\3\3\3\3\5\3G\n\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3P\n\3\3\3\3\3"+ + "\3\3\5\3U\n\3\3\3\3\3\5\3Y\n\3\3\3\3\3\5\3]\n\3\3\3\3\3\3\3\5\3b\n\3\3"+ + "\3\3\3\5\3f\n\3\3\3\3\3\5\3j\n\3\3\3\3\3\5\3n\n\3\3\3\3\3\3\3\5\3s\n\3"+ + "\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\6\3~\n\3\r\3\16\3\177\3\3\3\3\3\3"+ + "\5\3\u0085\n\3\3\3\3\3\5\3\u0089\n\3\5\3\u008b\n\3\3\4\3\4\7\4\u008f\n"+ + "\4\f\4\16\4\u0092\13\4\3\4\3\4\5\4\u0096\n\4\3\5\3\5\3\6\3\6\5\6\u009c"+ + "\n\6\3\7\3\7\3\b\3\b\3\b\3\b\7\b\u00a4\n\b\f\b\16\b\u00a7\13\b\3\t\3\t"+ + "\3\t\7\t\u00ac\n\t\f\t\16\t\u00af\13\t\3\n\3\n\3\n\5\n\u00b4\n\n\3\13"+ + "\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13"+ + "\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\5\13"+ + "\u00d2\n\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13"+ + "\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13"+ + "\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\7\13\u00f8\n\13\f\13"+ + "\16\13\u00fb\13\13\3\f\3\f\3\f\3\f\3\f\3\f\5\f\u0103\n\f\3\r\3\r\3\r\3"+ + "\r\3\r\3\r\3\r\5\r\u010c\n\r\3\r\3\r\3\r\5\r\u0111\n\r\3\16\3\16\3\16"+ + 
"\3\16\3\16\3\16\3\16\3\16\3\16\5\16\u011c\n\16\3\17\3\17\3\17\3\17\3\17"+ + "\5\17\u0123\n\17\3\20\3\20\3\20\5\20\u0128\n\20\3\21\3\21\3\21\3\22\3"+ + "\22\3\22\3\22\5\22\u0131\n\22\3\23\3\23\3\23\5\23\u0136\n\23\3\24\3\24"+ + "\3\24\5\24\u013b\n\24\3\25\3\25\3\25\3\25\3\25\5\25\u0142\n\25\3\25\3"+ + "\25\3\25\3\25\6\25\u0148\n\25\r\25\16\25\u0149\3\25\5\25\u014d\n\25\5"+ + "\25\u014f\n\25\3\26\3\26\3\26\5\26\u0154\n\26\3\27\3\27\3\27\3\27\7\27"+ + "\u015a\n\27\f\27\16\27\u015d\13\27\5\27\u015f\n\27\3\27\3\27\3\30\3\30"+ + "\3\30\2\3\24\31\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\2\f\4"+ + "\2\32\33\37 \3\2\65@\3\2BE\3\2\34\36\3\2\37 \3\2!#\3\2$\'\3\2(+\3\2MN"+ + "\3\2\63\64\u01a5\2\61\3\2\2\2\4\u008a\3\2\2\2\6\u0095\3\2\2\2\b\u0097"+ + "\3\2\2\2\n\u009b\3\2\2\2\f\u009d\3\2\2\2\16\u009f\3\2\2\2\20\u00a8\3\2"+ + "\2\2\22\u00b0\3\2\2\2\24\u00d1\3\2\2\2\26\u0102\3\2\2\2\30\u0104\3\2\2"+ + "\2\32\u0112\3\2\2\2\34\u011d\3\2\2\2\36\u0124\3\2\2\2 \u0129\3\2\2\2\""+ + "\u012c\3\2\2\2$\u0132\3\2\2\2&\u0137\3\2\2\2(\u013c\3\2\2\2*\u0150\3\2"+ + "\2\2,\u0155\3\2\2\2.\u0162\3\2\2\2\60\62\5\4\3\2\61\60\3\2\2\2\62\63\3"+ + "\2\2\2\63\61\3\2\2\2\63\64\3\2\2\2\64\65\3\2\2\2\65\66\7\2\2\3\66\3\3"+ + "\2\2\2\678\7\16\2\289\7\t\2\29:\5\24\13\2:;\7\n\2\2;>\5\6\4\2<=\7\17\2"+ + "\2=?\5\6\4\2><\3\2\2\2>?\3\2\2\2?\u008b\3\2\2\2@A\7\20\2\2AB\7\t\2\2B"+ + "C\5\24\13\2CF\7\n\2\2DG\5\6\4\2EG\5\b\5\2FD\3\2\2\2FE\3\2\2\2G\u008b\3"+ + "\2\2\2HI\7\21\2\2IJ\5\6\4\2JK\7\20\2\2KL\7\t\2\2LM\5\24\13\2MO\7\n\2\2"+ + "NP\7\r\2\2ON\3\2\2\2OP\3\2\2\2P\u008b\3\2\2\2QR\7\22\2\2RT\7\t\2\2SU\5"+ + "\n\6\2TS\3\2\2\2TU\3\2\2\2UV\3\2\2\2VX\7\r\2\2WY\5\24\13\2XW\3\2\2\2X"+ + "Y\3\2\2\2YZ\3\2\2\2Z\\\7\r\2\2[]\5\f\7\2\\[\3\2\2\2\\]\3\2\2\2]^\3\2\2"+ + "\2^a\7\n\2\2_b\5\6\4\2`b\5\b\5\2a_\3\2\2\2a`\3\2\2\2b\u008b\3\2\2\2ce"+ + "\5\16\b\2df\7\r\2\2ed\3\2\2\2ef\3\2\2\2f\u008b\3\2\2\2gi\7\23\2\2hj\7"+ + "\r\2\2ih\3\2\2\2ij\3\2\2\2j\u008b\3\2\2\2km\7\24\2\2ln\7\r\2\2ml\3\2\2"+ + 
"\2mn\3\2\2\2n\u008b\3\2\2\2op\7\25\2\2pr\5\24\13\2qs\7\r\2\2rq\3\2\2\2"+ + "rs\3\2\2\2s\u008b\3\2\2\2tu\7\27\2\2u}\5\6\4\2vw\7\30\2\2wx\7\t\2\2xy"+ + "\7K\2\2yz\7L\2\2z{\3\2\2\2{|\7\n\2\2|~\5\6\4\2}v\3\2\2\2~\177\3\2\2\2"+ + "\177}\3\2\2\2\177\u0080\3\2\2\2\u0080\u008b\3\2\2\2\u0081\u0082\7\31\2"+ + "\2\u0082\u0084\5\24\13\2\u0083\u0085\7\r\2\2\u0084\u0083\3\2\2\2\u0084"+ + "\u0085\3\2\2\2\u0085\u008b\3\2\2\2\u0086\u0088\5\24\13\2\u0087\u0089\7"+ + "\r\2\2\u0088\u0087\3\2\2\2\u0088\u0089\3\2\2\2\u0089\u008b\3\2\2\2\u008a"+ + "\67\3\2\2\2\u008a@\3\2\2\2\u008aH\3\2\2\2\u008aQ\3\2\2\2\u008ac\3\2\2"+ + "\2\u008ag\3\2\2\2\u008ak\3\2\2\2\u008ao\3\2\2\2\u008at\3\2\2\2\u008a\u0081"+ + "\3\2\2\2\u008a\u0086\3\2\2\2\u008b\5\3\2\2\2\u008c\u0090\7\5\2\2\u008d"+ + "\u008f\5\4\3\2\u008e\u008d\3\2\2\2\u008f\u0092\3\2\2\2\u0090\u008e\3\2"+ + "\2\2\u0090\u0091\3\2\2\2\u0091\u0093\3\2\2\2\u0092\u0090\3\2\2\2\u0093"+ + "\u0096\7\6\2\2\u0094\u0096\5\4\3\2\u0095\u008c\3\2\2\2\u0095\u0094\3\2"+ + "\2\2\u0096\7\3\2\2\2\u0097\u0098\7\r\2\2\u0098\t\3\2\2\2\u0099\u009c\5"+ + "\16\b\2\u009a\u009c\5\24\13\2\u009b\u0099\3\2\2\2\u009b\u009a\3\2\2\2"+ + "\u009c\13\3\2\2\2\u009d\u009e\5\24\13\2\u009e\r\3\2\2\2\u009f\u00a0\5"+ + "\20\t\2\u00a0\u00a5\5\22\n\2\u00a1\u00a2\7\f\2\2\u00a2\u00a4\5\22\n\2"+ + "\u00a3\u00a1\3\2\2\2\u00a4\u00a7\3\2\2\2\u00a5\u00a3\3\2\2\2\u00a5\u00a6"+ + "\3\2\2\2\u00a6\17\3\2\2\2\u00a7\u00a5\3\2\2\2\u00a8\u00ad\7K\2\2\u00a9"+ + "\u00aa\7\7\2\2\u00aa\u00ac\7\b\2\2\u00ab\u00a9\3\2\2\2\u00ac\u00af\3\2"+ + "\2\2\u00ad\u00ab\3\2\2\2\u00ad\u00ae\3\2\2\2\u00ae\21\3\2\2\2\u00af\u00ad"+ + "\3\2\2\2\u00b0\u00b3\7L\2\2\u00b1\u00b2\7\65\2\2\u00b2\u00b4\5\24\13\2"+ + "\u00b3\u00b1\3\2\2\2\u00b3\u00b4\3\2\2\2\u00b4\23\3\2\2\2\u00b5\u00b6"+ + "\b\13\1\2\u00b6\u00b7\t\2\2\2\u00b7\u00d2\5\24\13\20\u00b8\u00b9\7\t\2"+ + "\2\u00b9\u00ba\5\20\t\2\u00ba\u00bb\7\n\2\2\u00bb\u00bc\5\24\13\17\u00bc"+ + "\u00d2\3\2\2\2\u00bd\u00be\5\26\f\2\u00be\u00bf\t\3\2\2\u00bf\u00c0\5"+ + 
"\24\13\3\u00c0\u00d2\3\2\2\2\u00c1\u00c2\7\t\2\2\u00c2\u00c3\5\24\13\2"+ + "\u00c3\u00c4\7\n\2\2\u00c4\u00d2\3\2\2\2\u00c5\u00d2\t\4\2\2\u00c6\u00d2"+ + "\7G\2\2\u00c7\u00d2\7H\2\2\u00c8\u00d2\7I\2\2\u00c9\u00d2\7J\2\2\u00ca"+ + "\u00cb\5\26\f\2\u00cb\u00cc\5.\30\2\u00cc\u00d2\3\2\2\2\u00cd\u00ce\5"+ + ".\30\2\u00ce\u00cf\5\26\f\2\u00cf\u00d2\3\2\2\2\u00d0\u00d2\5\26\f\2\u00d1"+ + "\u00b5\3\2\2\2\u00d1\u00b8\3\2\2\2\u00d1\u00bd\3\2\2\2\u00d1\u00c1\3\2"+ + "\2\2\u00d1\u00c5\3\2\2\2\u00d1\u00c6\3\2\2\2\u00d1\u00c7\3\2\2\2\u00d1"+ + "\u00c8\3\2\2\2\u00d1\u00c9\3\2\2\2\u00d1\u00ca\3\2\2\2\u00d1\u00cd\3\2"+ + "\2\2\u00d1\u00d0\3\2\2\2\u00d2\u00f9\3\2\2\2\u00d3\u00d4\f\16\2\2\u00d4"+ + "\u00d5\t\5\2\2\u00d5\u00f8\5\24\13\17\u00d6\u00d7\f\r\2\2\u00d7\u00d8"+ + "\t\6\2\2\u00d8\u00f8\5\24\13\16\u00d9\u00da\f\f\2\2\u00da\u00db\t\7\2"+ + "\2\u00db\u00f8\5\24\13\r\u00dc\u00dd\f\13\2\2\u00dd\u00de\t\b\2\2\u00de"+ + "\u00f8\5\24\13\f\u00df\u00e0\f\n\2\2\u00e0\u00e1\t\t\2\2\u00e1\u00f8\5"+ + "\24\13\13\u00e2\u00e3\f\t\2\2\u00e3\u00e4\7,\2\2\u00e4\u00f8\5\24\13\n"+ + "\u00e5\u00e6\f\b\2\2\u00e6\u00e7\7-\2\2\u00e7\u00f8\5\24\13\t\u00e8\u00e9"+ + "\f\7\2\2\u00e9\u00ea\7.\2\2\u00ea\u00f8\5\24\13\b\u00eb\u00ec\f\6\2\2"+ + "\u00ec\u00ed\7/\2\2\u00ed\u00f8\5\24\13\7\u00ee\u00ef\f\5\2\2\u00ef\u00f0"+ + "\7\60\2\2\u00f0\u00f8\5\24\13\6\u00f1\u00f2\f\4\2\2\u00f2\u00f3\7\61\2"+ + "\2\u00f3\u00f4\5\24\13\2\u00f4\u00f5\7\62\2\2\u00f5\u00f6\5\24\13\4\u00f6"+ + "\u00f8\3\2\2\2\u00f7\u00d3\3\2\2\2\u00f7\u00d6\3\2\2\2\u00f7\u00d9\3\2"+ + "\2\2\u00f7\u00dc\3\2\2\2\u00f7\u00df\3\2\2\2\u00f7\u00e2\3\2\2\2\u00f7"+ + "\u00e5\3\2\2\2\u00f7\u00e8\3\2\2\2\u00f7\u00eb\3\2\2\2\u00f7\u00ee\3\2"+ + "\2\2\u00f7\u00f1\3\2\2\2\u00f8\u00fb\3\2\2\2\u00f9\u00f7\3\2\2\2\u00f9"+ + "\u00fa\3\2\2\2\u00fa\25\3\2\2\2\u00fb\u00f9\3\2\2\2\u00fc\u0103\5\30\r"+ + "\2\u00fd\u0103\5\32\16\2\u00fe\u0103\5 \21\2\u00ff\u0103\5$\23\2\u0100"+ + "\u0103\5(\25\2\u0101\u0103\5*\26\2\u0102\u00fc\3\2\2\2\u0102\u00fd\3\2"+ + 
"\2\2\u0102\u00fe\3\2\2\2\u0102\u00ff\3\2\2\2\u0102\u0100\3\2\2\2\u0102"+ + "\u0101\3\2\2\2\u0103\27\3\2\2\2\u0104\u010b\7\t\2\2\u0105\u010c\5\30\r"+ + "\2\u0106\u010c\5\32\16\2\u0107\u010c\5 \21\2\u0108\u010c\5$\23\2\u0109"+ + "\u010c\5(\25\2\u010a\u010c\5*\26\2\u010b\u0105\3\2\2\2\u010b\u0106\3\2"+ + "\2\2\u010b\u0107\3\2\2\2\u010b\u0108\3\2\2\2\u010b\u0109\3\2\2\2\u010b"+ + "\u010a\3\2\2\2\u010c\u010d\3\2\2\2\u010d\u0110\7\n\2\2\u010e\u0111\5\36"+ + "\20\2\u010f\u0111\5\34\17\2\u0110\u010e\3\2\2\2\u0110\u010f\3\2\2\2\u0110"+ + "\u0111\3\2\2\2\u0111\31\3\2\2\2\u0112\u0113\7\t\2\2\u0113\u0114\5\20\t"+ + "\2\u0114\u011b\7\n\2\2\u0115\u011c\5\30\r\2\u0116\u011c\5\32\16\2\u0117"+ + "\u011c\5 \21\2\u0118\u011c\5$\23\2\u0119\u011c\5(\25\2\u011a\u011c\5*"+ + "\26\2\u011b\u0115\3\2\2\2\u011b\u0116\3\2\2\2\u011b\u0117\3\2\2\2\u011b"+ + "\u0118\3\2\2\2\u011b\u0119\3\2\2\2\u011b\u011a\3\2\2\2\u011c\33\3\2\2"+ + "\2\u011d\u011e\7\7\2\2\u011e\u011f\5\24\13\2\u011f\u0122\7\b\2\2\u0120"+ + "\u0123\5\36\20\2\u0121\u0123\5\34\17\2\u0122\u0120\3\2\2\2\u0122\u0121"+ + "\3\2\2\2\u0122\u0123\3\2\2\2\u0123\35\3\2\2\2\u0124\u0127\7\13\2\2\u0125"+ + "\u0128\5\"\22\2\u0126\u0128\5&\24\2\u0127\u0125\3\2\2\2\u0127\u0126\3"+ + "\2\2\2\u0128\37\3\2\2\2\u0129\u012a\7K\2\2\u012a\u012b\5\36\20\2\u012b"+ + "!\3\2\2\2\u012c\u012d\7N\2\2\u012d\u0130\5,\27\2\u012e\u0131\5\36\20\2"+ + "\u012f\u0131\5\34\17\2\u0130\u012e\3\2\2\2\u0130\u012f\3\2\2\2\u0130\u0131"+ + "\3\2\2\2\u0131#\3\2\2\2\u0132\u0135\7L\2\2\u0133\u0136\5\36\20\2\u0134"+ + "\u0136\5\34\17\2\u0135\u0133\3\2\2\2\u0135\u0134\3\2\2\2\u0135\u0136\3"+ + "\2\2\2\u0136%\3\2\2\2\u0137\u013a\t\n\2\2\u0138\u013b\5\36\20\2\u0139"+ + "\u013b\5\34\17\2\u013a\u0138\3\2\2\2\u013a\u0139\3\2\2\2\u013a\u013b\3"+ + "\2\2\2\u013b\'\3\2\2\2\u013c\u013d\7\26\2\2\u013d\u014e\7K\2\2\u013e\u0141"+ + "\5,\27\2\u013f\u0142\5\36\20\2\u0140\u0142\5\34\17\2\u0141\u013f\3\2\2"+ + "\2\u0141\u0140\3\2\2\2\u0141\u0142\3\2\2\2\u0142\u014f\3\2\2\2\u0143\u0144"+ + 
"\7\7\2\2\u0144\u0145\5\24\13\2\u0145\u0146\7\b\2\2\u0146\u0148\3\2\2\2"+ + "\u0147\u0143\3\2\2\2\u0148\u0149\3\2\2\2\u0149\u0147\3\2\2\2\u0149\u014a"+ + "\3\2\2\2\u014a\u014c\3\2\2\2\u014b\u014d\5\36\20\2\u014c\u014b\3\2\2\2"+ + "\u014c\u014d\3\2\2\2\u014d\u014f\3\2\2\2\u014e\u013e\3\2\2\2\u014e\u0147"+ + "\3\2\2\2\u014f)\3\2\2\2\u0150\u0153\7F\2\2\u0151\u0154\5\36\20\2\u0152"+ + "\u0154\5\34\17\2\u0153\u0151\3\2\2\2\u0153\u0152\3\2\2\2\u0153\u0154\3"+ + "\2\2\2\u0154+\3\2\2\2\u0155\u015e\7\t\2\2\u0156\u015b\5\24\13\2\u0157"+ + "\u0158\7\f\2\2\u0158\u015a\5\24\13\2\u0159\u0157\3\2\2\2\u015a\u015d\3"+ + "\2\2\2\u015b\u0159\3\2\2\2\u015b\u015c\3\2\2\2\u015c\u015f\3\2\2\2\u015d"+ + "\u015b\3\2\2\2\u015e\u0156\3\2\2\2\u015e\u015f\3\2\2\2\u015f\u0160\3\2"+ + "\2\2\u0160\u0161\7\n\2\2\u0161-\3\2\2\2\u0162\u0163\t\13\2\2\u0163/\3"+ + "\2\2\2+\63>FOTX\\aeimr\177\u0084\u0088\u008a\u0090\u0095\u009b\u00a5\u00ad"+ + "\u00b3\u00d1\u00f7\u00f9\u0102\u010b\u0110\u011b\u0122\u0127\u0130\u0135"+ + "\u013a\u0141\u0149\u014c\u014e\u0153\u015b\u015e"; + public static final ATN _ATN = + new ATNDeserializer().deserialize(_serializedATN.toCharArray()); + static { + _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()]; + for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) { + _decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i); + } + } +} diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserBaseVisitor.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserBaseVisitor.java new file mode 100644 index 00000000000..d731b57676b --- /dev/null +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserBaseVisitor.java @@ -0,0 +1,357 @@ +// ANTLR GENERATED CODE: DO NOT EDIT +package org.elasticsearch.plan.a; +import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor; + +/** + * This class provides an empty implementation of {@link PlanAParserVisitor}, + * which can be extended to create a visitor which only needs to 
handle a subset + * of the available methods. + * + * @param The return type of the visit operation. Use {@link Void} for + * operations with no return type. + */ +class PlanAParserBaseVisitor extends AbstractParseTreeVisitor implements PlanAParserVisitor { + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitSource(PlanAParser.SourceContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitIf(PlanAParser.IfContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitWhile(PlanAParser.WhileContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitDo(PlanAParser.DoContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitFor(PlanAParser.ForContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitDecl(PlanAParser.DeclContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitContinue(PlanAParser.ContinueContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitBreak(PlanAParser.BreakContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitReturn(PlanAParser.ReturnContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitTry(PlanAParser.TryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitThrow(PlanAParser.ThrowContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitExpr(PlanAParser.ExprContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitMultiple(PlanAParser.MultipleContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitSingle(PlanAParser.SingleContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitEmpty(PlanAParser.EmptyContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitInitializer(PlanAParser.InitializerContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitAfterthought(PlanAParser.AfterthoughtContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitDeclaration(PlanAParser.DeclarationContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitDecltype(PlanAParser.DecltypeContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitDeclvar(PlanAParser.DeclvarContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitComp(PlanAParser.CompContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitBool(PlanAParser.BoolContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitConditional(PlanAParser.ConditionalContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitAssignment(PlanAParser.AssignmentContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitFalse(PlanAParser.FalseContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitNumeric(PlanAParser.NumericContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitUnary(PlanAParser.UnaryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitPrecedence(PlanAParser.PrecedenceContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitPreinc(PlanAParser.PreincContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitPostinc(PlanAParser.PostincContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitCast(PlanAParser.CastContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitExternal(PlanAParser.ExternalContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitNull(PlanAParser.NullContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitBinary(PlanAParser.BinaryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitChar(PlanAParser.CharContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitTrue(PlanAParser.TrueContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitExtstart(PlanAParser.ExtstartContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitExtprec(PlanAParser.ExtprecContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitExtcast(PlanAParser.ExtcastContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitExtbrace(PlanAParser.ExtbraceContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitExtdot(PlanAParser.ExtdotContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitExttype(PlanAParser.ExttypeContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitExtcall(PlanAParser.ExtcallContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitExtvar(PlanAParser.ExtvarContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitExtfield(PlanAParser.ExtfieldContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitExtnew(PlanAParser.ExtnewContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitExtstring(PlanAParser.ExtstringContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitArguments(PlanAParser.ArgumentsContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitIncrement(PlanAParser.IncrementContext ctx) { return visitChildren(ctx); } +} diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserVisitor.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserVisitor.java new file mode 100644 index 00000000000..7470f3b6ad5 --- /dev/null +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserVisitor.java @@ -0,0 +1,336 @@ +// ANTLR GENERATED CODE: DO NOT EDIT +package org.elasticsearch.plan.a; +import org.antlr.v4.runtime.tree.ParseTreeVisitor; + +/** + * This interface defines a complete generic visitor for a parse tree produced + * by {@link PlanAParser}. + * + * @param The return type of the visit operation. Use {@link Void} for + * operations with no return type. + */ +interface PlanAParserVisitor extends ParseTreeVisitor { + /** + * Visit a parse tree produced by {@link PlanAParser#source}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitSource(PlanAParser.SourceContext ctx); + /** + * Visit a parse tree produced by the {@code if} + * labeled alternative in {@link PlanAParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitIf(PlanAParser.IfContext ctx); + /** + * Visit a parse tree produced by the {@code while} + * labeled alternative in {@link PlanAParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitWhile(PlanAParser.WhileContext ctx); + /** + * Visit a parse tree produced by the {@code do} + * labeled alternative in {@link PlanAParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitDo(PlanAParser.DoContext ctx); + /** + * Visit a parse tree produced by the {@code for} + * labeled alternative in {@link PlanAParser#statement}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitFor(PlanAParser.ForContext ctx); + /** + * Visit a parse tree produced by the {@code decl} + * labeled alternative in {@link PlanAParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitDecl(PlanAParser.DeclContext ctx); + /** + * Visit a parse tree produced by the {@code continue} + * labeled alternative in {@link PlanAParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitContinue(PlanAParser.ContinueContext ctx); + /** + * Visit a parse tree produced by the {@code break} + * labeled alternative in {@link PlanAParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitBreak(PlanAParser.BreakContext ctx); + /** + * Visit a parse tree produced by the {@code return} + * labeled alternative in {@link PlanAParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitReturn(PlanAParser.ReturnContext ctx); + /** + * Visit a parse tree produced by the {@code try} + * labeled alternative in {@link PlanAParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitTry(PlanAParser.TryContext ctx); + /** + * Visit a parse tree produced by the {@code throw} + * labeled alternative in {@link PlanAParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitThrow(PlanAParser.ThrowContext ctx); + /** + * Visit a parse tree produced by the {@code expr} + * labeled alternative in {@link PlanAParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExpr(PlanAParser.ExprContext ctx); + /** + * Visit a parse tree produced by the {@code multiple} + * labeled alternative in {@link PlanAParser#block}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitMultiple(PlanAParser.MultipleContext ctx); + /** + * Visit a parse tree produced by the {@code single} + * labeled alternative in {@link PlanAParser#block}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitSingle(PlanAParser.SingleContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#empty}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitEmpty(PlanAParser.EmptyContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#initializer}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitInitializer(PlanAParser.InitializerContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#afterthought}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitAfterthought(PlanAParser.AfterthoughtContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#declaration}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitDeclaration(PlanAParser.DeclarationContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#decltype}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitDecltype(PlanAParser.DecltypeContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#declvar}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitDeclvar(PlanAParser.DeclvarContext ctx); + /** + * Visit a parse tree produced by the {@code comp} + * labeled alternative in {@link PlanAParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitComp(PlanAParser.CompContext ctx); + /** + * Visit a parse tree produced by the {@code bool} + * labeled alternative in {@link PlanAParser#expression}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitBool(PlanAParser.BoolContext ctx); + /** + * Visit a parse tree produced by the {@code conditional} + * labeled alternative in {@link PlanAParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitConditional(PlanAParser.ConditionalContext ctx); + /** + * Visit a parse tree produced by the {@code assignment} + * labeled alternative in {@link PlanAParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitAssignment(PlanAParser.AssignmentContext ctx); + /** + * Visit a parse tree produced by the {@code false} + * labeled alternative in {@link PlanAParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitFalse(PlanAParser.FalseContext ctx); + /** + * Visit a parse tree produced by the {@code numeric} + * labeled alternative in {@link PlanAParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNumeric(PlanAParser.NumericContext ctx); + /** + * Visit a parse tree produced by the {@code unary} + * labeled alternative in {@link PlanAParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitUnary(PlanAParser.UnaryContext ctx); + /** + * Visit a parse tree produced by the {@code precedence} + * labeled alternative in {@link PlanAParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitPrecedence(PlanAParser.PrecedenceContext ctx); + /** + * Visit a parse tree produced by the {@code preinc} + * labeled alternative in {@link PlanAParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitPreinc(PlanAParser.PreincContext ctx); + /** + * Visit a parse tree produced by the {@code postinc} + * labeled alternative in {@link PlanAParser#expression}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitPostinc(PlanAParser.PostincContext ctx); + /** + * Visit a parse tree produced by the {@code cast} + * labeled alternative in {@link PlanAParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitCast(PlanAParser.CastContext ctx); + /** + * Visit a parse tree produced by the {@code external} + * labeled alternative in {@link PlanAParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExternal(PlanAParser.ExternalContext ctx); + /** + * Visit a parse tree produced by the {@code null} + * labeled alternative in {@link PlanAParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNull(PlanAParser.NullContext ctx); + /** + * Visit a parse tree produced by the {@code binary} + * labeled alternative in {@link PlanAParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitBinary(PlanAParser.BinaryContext ctx); + /** + * Visit a parse tree produced by the {@code char} + * labeled alternative in {@link PlanAParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitChar(PlanAParser.CharContext ctx); + /** + * Visit a parse tree produced by the {@code true} + * labeled alternative in {@link PlanAParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitTrue(PlanAParser.TrueContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#extstart}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExtstart(PlanAParser.ExtstartContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#extprec}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExtprec(PlanAParser.ExtprecContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#extcast}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitExtcast(PlanAParser.ExtcastContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#extbrace}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExtbrace(PlanAParser.ExtbraceContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#extdot}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExtdot(PlanAParser.ExtdotContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#exttype}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExttype(PlanAParser.ExttypeContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#extcall}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExtcall(PlanAParser.ExtcallContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#extvar}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExtvar(PlanAParser.ExtvarContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#extfield}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExtfield(PlanAParser.ExtfieldContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#extnew}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExtnew(PlanAParser.ExtnewContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#extstring}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExtstring(PlanAParser.ExtstringContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#arguments}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitArguments(PlanAParser.ArgumentsContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#increment}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitIncrement(PlanAParser.IncrementContext ctx); +} diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAPlugin.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAPlugin.java new file mode 100644 index 00000000000..c893cd38324 --- /dev/null +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAPlugin.java @@ -0,0 +1,40 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plan.a; + +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.ScriptModule; + +public final class PlanAPlugin extends Plugin { + + @Override + public String name() { + return "lang-plan-a"; + } + + @Override + public String description() { + return "Plan A scripting language for Elasticsearch"; + } + + public void onModule(ScriptModule module) { + module.addScriptEngine(PlanAScriptEngineService.class); + } +} diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java new file mode 100644 index 00000000000..6b3cd834715 --- /dev/null +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java @@ -0,0 +1,140 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plan.a; + +import org.apache.lucene.index.LeafReaderContext; +import org.elasticsearch.SpecialPermission; +import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.script.CompiledScript; +import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.LeafSearchScript; +import org.elasticsearch.script.ScriptEngineService; +import org.elasticsearch.script.SearchScript; +import org.elasticsearch.search.lookup.SearchLookup; + +import java.io.IOException; +import java.security.AccessControlContext; +import java.security.AccessController; +import java.security.Permissions; +import java.security.PrivilegedAction; +import java.security.ProtectionDomain; +import java.util.Map; + +public class PlanAScriptEngineService extends AbstractComponent implements ScriptEngineService { + + public static final String NAME = "plan-a"; + // TODO: this should really be per-script since scripts do so many different things? + private static final CompilerSettings compilerSettings = new CompilerSettings(); + + public static final String NUMERIC_OVERFLOW = "plan-a.numeric_overflow"; + + // TODO: how should custom definitions be specified? 
+ private Definition definition = null; + + @Inject + public PlanAScriptEngineService(Settings settings) { + super(settings); + compilerSettings.setNumericOverflow(settings.getAsBoolean(NUMERIC_OVERFLOW, compilerSettings.getNumericOverflow())); + } + + public void setDefinition(final Definition definition) { + this.definition = new Definition(definition); + } + + @Override + public String[] types() { + return new String[] { NAME }; + } + + @Override + public String[] extensions() { + return new String[] { NAME }; + } + + @Override + public boolean sandboxed() { + return true; + } + + // context used during compilation + private static final AccessControlContext COMPILATION_CONTEXT; + static { + Permissions none = new Permissions(); + none.setReadOnly(); + COMPILATION_CONTEXT = new AccessControlContext(new ProtectionDomain[] { + new ProtectionDomain(null, none) + }); + } + + @Override + public Object compile(String script) { + // check we ourselves are not being called by unprivileged code + SecurityManager sm = System.getSecurityManager(); + if (sm != null) { + sm.checkPermission(new SpecialPermission()); + } + // create our loader (which loads compiled code with no permissions) + Compiler.Loader loader = AccessController.doPrivileged(new PrivilegedAction() { + @Override + public Compiler.Loader run() { + return new Compiler.Loader(getClass().getClassLoader()); + } + }); + // drop all permissions to actually compile the code itself + return AccessController.doPrivileged(new PrivilegedAction() { + @Override + public Executable run() { + return Compiler.compile(loader, "something", script, definition, compilerSettings); + } + }, COMPILATION_CONTEXT); + } + + @Override + public ExecutableScript executable(CompiledScript compiledScript, Map vars) { + return new ScriptImpl((Executable) compiledScript.compiled(), vars, null); + } + + @Override + public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map vars) { + return new SearchScript() { + 
@Override + public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException { + return new ScriptImpl((Executable) compiledScript.compiled(), vars, lookup.getLeafSearchLookup(context)); + } + + @Override + public boolean needsScores() { + return true; // TODO: maybe even do these different and more like expressions. + } + }; + } + + @Override + public void scriptRemoved(CompiledScript script) { + // nothing to do + } + + @Override + public void close() throws IOException { + // nothing to do + } +} diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ScriptImpl.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ScriptImpl.java new file mode 100644 index 00000000000..3910cdc96f7 --- /dev/null +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ScriptImpl.java @@ -0,0 +1,96 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plan.a; + +import org.apache.lucene.search.Scorer; +import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.LeafSearchScript; +import org.elasticsearch.script.ScoreAccessor; +import org.elasticsearch.search.lookup.LeafSearchLookup; + +import java.util.HashMap; +import java.util.Map; + +final class ScriptImpl implements ExecutableScript, LeafSearchScript { + final Executable executable; + final Map variables; + final LeafSearchLookup lookup; + + ScriptImpl(Executable executable, Map vars, LeafSearchLookup lookup) { + this.executable = executable; + this.lookup = lookup; + this.variables = new HashMap<>(); + if (vars != null) { + variables.putAll(vars); + } + if (lookup != null) { + variables.putAll(lookup.asMap()); + } + } + + @Override + public void setNextVar(String name, Object value) { + variables.put(name, value); + } + + @Override + public Object run() { + return executable.execute(variables); + } + + @Override + public float runAsFloat() { + return ((Number) run()).floatValue(); + } + + @Override + public long runAsLong() { + return ((Number) run()).longValue(); + } + + @Override + public double runAsDouble() { + return ((Number) run()).doubleValue(); + } + + @Override + public Object unwrap(Object value) { + return value; + } + + @Override + public void setScorer(Scorer scorer) { + variables.put("_score", new ScoreAccessor(scorer)); + } + + @Override + public void setDocument(int doc) { + if (lookup != null) { + lookup.setDocument(doc); + } + } + + @Override + public void setSource(Map source) { + if (lookup != null) { + lookup.source().setSource(source); + } + } +} diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Utility.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Utility.java new file mode 100644 index 00000000000..3bb5ae463e7 --- /dev/null +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Utility.java @@ -0,0 +1,801 @@ +package 
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/**
 * Runtime helpers invoked from Plan A generated bytecode.
 * <p>
 * The unconventional method names ({@code NumberToboolean}, {@code BooleanTobyte}, ...)
 * encode the source and target type of each conversion; the bytecode writer looks
 * these methods up by their exact name and signature strings, so names and
 * signatures here must not change incompatibly.
 * <p>
 * Also provides overflow-checked arithmetic used when numeric overflow checking
 * is enabled ({@code *WithoutOverflow}, {@code to*Exact}).
 */
public class Utility {

    // ---- Number -> primitive / boxed conversions (boxed variants are null-safe) ----

    public static boolean NumberToboolean(final Number value) {
        return value.longValue() != 0;
    }

    public static char NumberTochar(final Number value) {
        return (char)value.intValue();
    }

    public static Boolean NumberToBoolean(final Number value) {
        return value.longValue() != 0;
    }

    public static Byte NumberToByte(final Number value) {
        return value == null ? null : value.byteValue();
    }

    public static Short NumberToShort(final Number value) {
        return value == null ? null : value.shortValue();
    }

    public static Character NumberToCharacter(final Number value) {
        return value == null ? null : (char)value.intValue();
    }

    public static Integer NumberToInteger(final Number value) {
        return value == null ? null : value.intValue();
    }

    public static Long NumberToLong(final Number value) {
        return value == null ? null : value.longValue();
    }

    public static Float NumberToFloat(final Number value) {
        return value == null ? null : value.floatValue();
    }

    public static Double NumberToDouble(final Number value) {
        return value == null ? null : value.doubleValue();
    }

    // ---- boolean -> numeric conversions (true -> 1, false -> 0) ----

    public static byte booleanTobyte(final boolean value) {
        return (byte)(value ? 1 : 0);
    }

    public static short booleanToshort(final boolean value) {
        return (short)(value ? 1 : 0);
    }

    public static char booleanTochar(final boolean value) {
        return (char)(value ? 1 : 0);
    }

    public static int booleanToint(final boolean value) {
        return value ? 1 : 0;
    }

    public static long booleanTolong(final boolean value) {
        return value ? 1 : 0;
    }

    public static float booleanTofloat(final boolean value) {
        return value ? 1 : 0;
    }

    public static double booleanTodouble(final boolean value) {
        return value ? 1 : 0;
    }

    public static Integer booleanToInteger(final boolean value) {
        return value ? 1 : 0;
    }

    // ---- Boolean -> primitive conversions (throw NullPointerException on null) ----

    public static byte BooleanTobyte(final Boolean value) {
        return (byte)(value ? 1 : 0);
    }

    public static short BooleanToshort(final Boolean value) {
        return (short)(value ? 1 : 0);
    }

    public static char BooleanTochar(final Boolean value) {
        return (char)(value ? 1 : 0);
    }

    public static int BooleanToint(final Boolean value) {
        return value ? 1 : 0;
    }

    public static long BooleanTolong(final Boolean value) {
        return value ? 1 : 0;
    }

    public static float BooleanTofloat(final Boolean value) {
        return value ? 1 : 0;
    }

    public static double BooleanTodouble(final Boolean value) {
        return value ? 1 : 0;
    }

    // ---- Boolean -> boxed conversions (null-safe) ----

    public static Byte BooleanToByte(final Boolean value) {
        return value == null ? null : (byte)(value ? 1 : 0);
    }

    public static Short BooleanToShort(final Boolean value) {
        return value == null ? null : (short)(value ? 1 : 0);
    }

    public static Character BooleanToCharacter(final Boolean value) {
        return value == null ? null : (char)(value ? 1 : 0);
    }

    public static Integer BooleanToInteger(final Boolean value) {
        return value == null ? null : value ? 1 : 0;
    }

    public static Long BooleanToLong(final Boolean value) {
        return value == null ? null : value ? 1L : 0L;
    }

    public static Float BooleanToFloat(final Boolean value) {
        return value == null ? null : value ? 1F : 0F;
    }

    public static Double BooleanToDouble(final Boolean value) {
        return value == null ? null : value ? 1D : 0D;
    }

    // ---- byte conversions ----

    public static boolean byteToboolean(final byte value) {
        return value != 0;
    }

    public static Short byteToShort(final byte value) {
        return (short)value;
    }

    public static Character byteToCharacter(final byte value) {
        return (char)(byte)value;
    }

    public static Integer byteToInteger(final byte value) {
        return (int)value;
    }

    public static Long byteToLong(final byte value) {
        return (long)value;
    }

    public static Float byteToFloat(final byte value) {
        return (float)value;
    }

    public static Double byteToDouble(final byte value) {
        return (double)value;
    }

    public static boolean ByteToboolean(final Byte value) {
        return value != 0;
    }

    public static char ByteTochar(final Byte value) {
        return (char)value.byteValue();
    }

    // ---- short conversions ----

    public static boolean shortToboolean(final short value) {
        return value != 0;
    }

    public static Byte shortToByte(final short value) {
        return (byte)value;
    }

    public static Character shortToCharacter(final short value) {
        return (char)(short)value;
    }

    public static Integer shortToInteger(final short value) {
        return (int)value;
    }

    public static Long shortToLong(final short value) {
        return (long)value;
    }

    public static Float shortToFloat(final short value) {
        return (float)value;
    }

    public static Double shortToDouble(final short value) {
        return (double)value;
    }

    public static boolean ShortToboolean(final Short value) {
        return value != 0;
    }

    public static char ShortTochar(final Short value) {
        return (char)value.shortValue();
    }

    // ---- char conversions ----

    public static boolean charToboolean(final char value) {
        return value != 0;
    }

    public static Byte charToByte(final char value) {
        return (byte)value;
    }

    public static Short charToShort(final char value) {
        return (short)value;
    }

    public static Integer charToInteger(final char value) {
        return (int)value;
    }

    public static Long charToLong(final char value) {
        return (long)value;
    }

    public static Float charToFloat(final char value) {
        return (float)value;
    }

    public static Double charToDouble(final char value) {
        return (double)value;
    }

    // ---- Character -> primitive conversions (throw NullPointerException on null) ----

    public static boolean CharacterToboolean(final Character value) {
        return value != 0;
    }

    public static byte CharacterTobyte(final Character value) {
        return (byte)value.charValue();
    }

    public static short CharacterToshort(final Character value) {
        return (short)value.charValue();
    }

    public static int CharacterToint(final Character value) {
        return (int)value;
    }

    public static long CharacterTolong(final Character value) {
        return (long)value;
    }

    public static float CharacterTofloat(final Character value) {
        return (float)value;
    }

    public static double CharacterTodouble(final Character value) {
        return (double)value;
    }

    // ---- Character -> boxed conversions (null-safe) ----

    public static Boolean CharacterToBoolean(final Character value) {
        return value == null ? null : value != 0;
    }

    public static Byte CharacterToByte(final Character value) {
        return value == null ? null : (byte)value.charValue();
    }

    public static Short CharacterToShort(final Character value) {
        return value == null ? null : (short)value.charValue();
    }

    public static Integer CharacterToInteger(final Character value) {
        return value == null ? null : (int)value;
    }

    public static Long CharacterToLong(final Character value) {
        return value == null ? null : (long)value;
    }

    public static Float CharacterToFloat(final Character value) {
        return value == null ? null : (float)value;
    }

    public static Double CharacterToDouble(final Character value) {
        return value == null ? null : (double)value;
    }

    // ---- int conversions ----

    public static boolean intToboolean(final int value) {
        return value != 0;
    }

    public static Byte intToByte(final int value) {
        return (byte)value;
    }

    public static Short intToShort(final int value) {
        return (short)value;
    }

    public static Character intToCharacter(final int value) {
        return (char)(int)value;
    }

    public static Long intToLong(final int value) {
        return (long)value;
    }

    public static Float intToFloat(final int value) {
        return (float)value;
    }

    public static Double intToDouble(final int value) {
        return (double)value;
    }

    public static boolean IntegerToboolean(final Integer value) {
        return value != 0;
    }

    public static char IntegerTochar(final Integer value) {
        return (char)value.intValue();
    }

    // ---- long conversions ----

    public static boolean longToboolean(final long value) {
        return value != 0;
    }

    public static Byte longToByte(final long value) {
        return (byte)value;
    }

    public static Short longToShort(final long value) {
        return (short)value;
    }

    public static Character longToCharacter(final long value) {
        return (char)(long)value;
    }

    public static Integer longToInteger(final long value) {
        return (int)value;
    }

    public static Float longToFloat(final long value) {
        return (float)value;
    }

    public static Double longToDouble(final long value) {
        return (double)value;
    }

    public static boolean LongToboolean(final Long value) {
        return value != 0;
    }

    public static char LongTochar(final Long value) {
        return (char)value.longValue();
    }

    // ---- float conversions ----

    public static boolean floatToboolean(final float value) {
        return value != 0;
    }

    public static Byte floatToByte(final float value) {
        return (byte)value;
    }

    public static Short floatToShort(final float value) {
        return (short)value;
    }

    public static Character floatToCharacter(final float value) {
        return (char)(float)value;
    }

    public static Integer floatToInteger(final float value) {
        return (int)value;
    }

    public static Long floatToLong(final float value) {
        return (long)value;
    }

    public static Double floatToDouble(final float value) {
        return (double)value;
    }

    public static boolean FloatToboolean(final Float value) {
        return value != 0;
    }

    public static char FloatTochar(final Float value) {
        return (char)value.floatValue();
    }

    // ---- double conversions ----

    public static boolean doubleToboolean(final double value) {
        return value != 0;
    }

    public static Byte doubleToByte(final double value) {
        return (byte)value;
    }

    public static Short doubleToShort(final double value) {
        return (short)value;
    }

    public static Character doubleToCharacter(final double value) {
        return (char)(double)value;
    }

    public static Integer doubleToInteger(final double value) {
        return (int)value;
    }

    public static Long doubleToLong(final double value) {
        return (long)value;
    }

    public static Float doubleToFloat(final double value) {
        return (float)value;
    }

    public static boolean DoubleToboolean(final Double value) {
        return value != 0;
    }

    public static char DoubleTochar(final Double value) {
        return (char)value.doubleValue();
    }

    // although divide by zero is guaranteed, the special overflow case is not caught.
    // its not needed for remainder because it is not possible there.
    // see https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.17.2

    /**
     * Integer divide without overflow
     * @throws ArithmeticException on overflow or divide-by-zero
     */
    public static int divideWithoutOverflow(int x, int y) {
        // MIN_VALUE / -1 overflows because -MIN_VALUE is not representable
        if (x == Integer.MIN_VALUE && y == -1) {
            throw new ArithmeticException("integer overflow");
        }
        return x / y;
    }

    /**
     * Long divide without overflow
     * @throws ArithmeticException on overflow or divide-by-zero
     */
    public static long divideWithoutOverflow(long x, long y) {
        if (x == Long.MIN_VALUE && y == -1L) {
            throw new ArithmeticException("long overflow");
        }
        return x / y;
    }

    // byte, short, and char are promoted to int for normal operations,
    // so the JDK exact methods are typically used, and the result has a wider range.
    // but compound assignments and increment/decrement operators (e.g. byte b = Byte.MAX_VALUE; b++;)
    // implicitly cast back to the original type: so these need to be checked against the original range.

    /**
     * Like {@link Math#toIntExact(long)} but for byte range.
     */
    public static byte toByteExact(int value) {
        byte s = (byte) value;
        if (s != value) {
            throw new ArithmeticException("byte overflow");
        }
        return s;
    }

    /**
     * Like {@link Math#toIntExact(long)} but for byte range.
     */
    public static byte toByteExact(long value) {
        byte s = (byte) value;
        if (s != value) {
            throw new ArithmeticException("byte overflow");
        }
        return s;
    }

    /**
     * Like {@link Math#toIntExact(long)} but for byte range.
     */
    public static byte toByteWithoutOverflow(float value) {
        if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) {
            throw new ArithmeticException("byte overflow");
        }
        return (byte)value;
    }

    /**
     * Like {@link Math#toIntExact(long)} but for byte range.
     */
    public static byte toByteWithoutOverflow(double value) {
        if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) {
            throw new ArithmeticException("byte overflow");
        }
        return (byte)value;
    }

    /**
     * Like {@link Math#toIntExact(long)} but for short range.
     */
    public static short toShortExact(int value) {
        short s = (short) value;
        if (s != value) {
            throw new ArithmeticException("short overflow");
        }
        return s;
    }

    /**
     * Like {@link Math#toIntExact(long)} but for short range.
     */
    public static short toShortExact(long value) {
        short s = (short) value;
        if (s != value) {
            throw new ArithmeticException("short overflow");
        }
        return s;
    }

    /**
     * Like {@link Math#toIntExact(long)} but for short range.
     */
    public static short toShortWithoutOverflow(float value) {
        if (value < Short.MIN_VALUE || value > Short.MAX_VALUE) {
            throw new ArithmeticException("short overflow");
        }
        return (short)value;
    }

    /**
     * Like {@link Math#toIntExact(long)} but for short range.
     * <p>
     * Correctly named {@code toShortWithoutOverflow} to match the float overload
     * and the bytecode writer's lookup of "short toShortWithoutOverflow(double)".
     */
    public static short toShortWithoutOverflow(double value) {
        if (value < Short.MIN_VALUE || value > Short.MAX_VALUE) {
            throw new ArithmeticException("short overflow");
        }
        return (short)value;
    }

    /**
     * Like {@link Math#toIntExact(long)} but for short range.
     * @deprecated misnamed; kept for compatibility — use {@link #toShortWithoutOverflow(double)}
     */
    @Deprecated
    public static short toShortExact(double value) {
        return toShortWithoutOverflow(value);
    }

    /**
     * Like {@link Math#toIntExact(long)} but for char range.
     */
    public static char toCharExact(int value) {
        char s = (char) value;
        if (s != value) {
            throw new ArithmeticException("char overflow");
        }
        return s;
    }

    /**
     * Like {@link Math#toIntExact(long)} but for char range.
     */
    public static char toCharExact(long value) {
        char s = (char) value;
        if (s != value) {
            throw new ArithmeticException("char overflow");
        }
        return s;
    }

    /**
     * Like {@link Math#toIntExact(long)} but for char range.
     */
    public static char toCharWithoutOverflow(float value) {
        if (value < Character.MIN_VALUE || value > Character.MAX_VALUE) {
            throw new ArithmeticException("char overflow");
        }
        return (char)value;
    }

    /**
     * Like {@link Math#toIntExact(long)} but for char range.
     */
    public static char toCharWithoutOverflow(double value) {
        if (value < Character.MIN_VALUE || value > Character.MAX_VALUE) {
            throw new ArithmeticException("char overflow");
        }
        return (char)value;
    }

    /**
     * Like {@link Math#toIntExact(long)} but for int range.
     */
    public static int toIntWithoutOverflow(float value) {
        if (value < Integer.MIN_VALUE || value > Integer.MAX_VALUE) {
            throw new ArithmeticException("int overflow");
        }
        return (int)value;
    }

    /**
     * Like {@link Math#toIntExact(long)} but for int range.
     */
    public static int toIntWithoutOverflow(double value) {
        if (value < Integer.MIN_VALUE || value > Integer.MAX_VALUE) {
            throw new ArithmeticException("int overflow");
        }
        return (int)value;
    }

    /**
     * Like {@link Math#toIntExact(long)} but for long range.
     */
    public static long toLongExactWithoutOverflow(float value) {
        if (value < Long.MIN_VALUE || value > Long.MAX_VALUE) {
            throw new ArithmeticException("long overflow");
        }
        return (long)value;
    }

    /**
     * Like {@link Math#toIntExact(long)} but for long range.
     * <p>
     * Returns {@code long}: the previous {@code float} return type was a typo and
     * silently lost precision for large values (the float overload already returns long).
     */
    public static long toLongExactWithoutOverflow(double value) {
        if (value < Long.MIN_VALUE || value > Long.MAX_VALUE) {
            throw new ArithmeticException("long overflow");
        }
        return (long)value;
    }

    /**
     * Like {@link Math#toIntExact(long)} but for float range.
     * <p>
     * Bounds use {@code -Float.MAX_VALUE}: {@code Float.MIN_VALUE} is the smallest
     * positive float, so comparing against it rejected every ordinary negative input.
     */
    public static float toFloatWithoutOverflow(double value) {
        if (value < -Float.MAX_VALUE || value > Float.MAX_VALUE) {
            throw new ArithmeticException("float overflow");
        }
        return (float)value;
    }

    /**
     * Checks for overflow, result is infinite but operands are finite
     * @throws ArithmeticException if overflow occurred
     */
    private static float checkInfFloat(float x, float y, float z) {
        if (Float.isInfinite(z)) {
            if (Float.isFinite(x) && Float.isFinite(y)) {
                throw new ArithmeticException("float overflow");
            }
        }
        return z;
    }

    /**
     * Checks for NaN, result is NaN but operands are finite
     * @throws ArithmeticException if overflow occurred
     */
    private static float checkNaNFloat(float x, float y, float z) {
        if (Float.isNaN(z)) {
            if (Float.isFinite(x) && Float.isFinite(y)) {
                throw new ArithmeticException("NaN");
            }
        }
        return z;
    }

    /**
     * Checks for overflow, result is infinite but operands are finite
     * @throws ArithmeticException if overflow occurred
     */
    private static double checkInfDouble(double x, double y, double z) {
        if (Double.isInfinite(z)) {
            if (Double.isFinite(x) && Double.isFinite(y)) {
                throw new ArithmeticException("double overflow");
            }
        }
        return z;
    }

    /**
     * Checks for NaN, result is NaN but operands are finite
     * @throws ArithmeticException if overflow occurred
     */
    private static double checkNaNDouble(double x, double y, double z) {
        if (Double.isNaN(z)) {
            if (Double.isFinite(x) && Double.isFinite(y)) {
                throw new ArithmeticException("NaN");
            }
        }
        return z;
    }

    /**
     * Adds two floats but throws {@code ArithmeticException}
     * if the result overflows.
     */
    public static float addWithoutOverflow(float x, float y) {
        return checkInfFloat(x, y, x + y);
    }

    /**
     * Adds two doubles but throws {@code ArithmeticException}
     * if the result overflows.
     */
    public static double addWithoutOverflow(double x, double y) {
        return checkInfDouble(x, y, x + y);
    }

    /**
     * Subtracts two floats but throws {@code ArithmeticException}
     * if the result overflows.
     */
    public static float subtractWithoutOverflow(float x, float y) {
        return checkInfFloat(x, y, x - y);
    }

    /**
     * Subtracts two doubles but throws {@code ArithmeticException}
     * if the result overflows.
     */
    public static double subtractWithoutOverflow(double x, double y) {
        return checkInfDouble(x, y, x - y);
    }

    /**
     * Multiplies two floats but throws {@code ArithmeticException}
     * if the result overflows.
     */
    public static float multiplyWithoutOverflow(float x, float y) {
        return checkInfFloat(x, y, x * y);
    }

    /**
     * Multiplies two doubles but throws {@code ArithmeticException}
     * if the result overflows.
     */
    public static double multiplyWithoutOverflow(double x, double y) {
        return checkInfDouble(x, y, x * y);
    }

    /**
     * Divides two floats but throws {@code ArithmeticException}
     * if the result overflows, or would create NaN from finite
     * inputs ({@code x == 0, y == 0})
     */
    public static float divideWithoutOverflow(float x, float y) {
        return checkNaNFloat(x, y, checkInfFloat(x, y, x / y));
    }

    /**
     * Divides two doubles but throws {@code ArithmeticException}
     * if the result overflows, or would create NaN from finite
     * inputs ({@code x == 0, y == 0})
     */
    public static double divideWithoutOverflow(double x, double y) {
        return checkNaNDouble(x, y, checkInfDouble(x, y, x / y));
    }

    /**
     * Takes remainder of two floats but throws {@code ArithmeticException}
     * if the result would create NaN from finite inputs ({@code y == 0})
     */
    public static float remainderWithoutOverflow(float x, float y) {
        return checkNaNFloat(x, y, x % y);
    }

    /**
     * Takes remainder of two doubles but throws {@code ArithmeticException}
     * if the result would create NaN from finite inputs ({@code y == 0})
     */
    public static double remainderWithoutOverflow(double x, double y) {
        return checkNaNDouble(x, y, x % y);
    }

    /**
     * Null-safe equality: equal when both are null, or both non-null and
     * {@code left.equals(right)}.
     */
    public static boolean checkEquals(final Object left, final Object right) {
        if (left != null && right != null) {
            return left.equals(right);
        }

        return left == null && right == null;
    }

    // utility class: no instances
    private Utility() {}
}
+ */ + +package org.elasticsearch.plan.a; + +import org.antlr.v4.runtime.ParserRuleContext; +import org.antlr.v4.runtime.tree.ParseTree; +import org.objectweb.asm.ClassWriter; +import org.objectweb.asm.Label; +import org.objectweb.asm.Opcodes; +import org.objectweb.asm.commons.GeneratorAdapter; + +import java.util.ArrayDeque; +import java.util.Deque; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.plan.a.Adapter.*; +import static org.elasticsearch.plan.a.Definition.*; +import static org.elasticsearch.plan.a.PlanAParser.*; + +class Writer extends PlanAParserBaseVisitor { + private static class Branch { + final ParserRuleContext source; + + Label begin; + Label end; + Label tru; + Label fals; + + private Branch(final ParserRuleContext source) { + this.source = source; + + begin = null; + end = null; + tru = null; + fals = null; + } + } + + final static String BASE_CLASS_NAME = Executable.class.getName(); + final static String CLASS_NAME = BASE_CLASS_NAME + "$CompiledPlanAExecutable"; + private final static org.objectweb.asm.Type BASE_CLASS_TYPE = org.objectweb.asm.Type.getType(Executable.class); + private final static org.objectweb.asm.Type CLASS_TYPE = + org.objectweb.asm.Type.getType("L" + CLASS_NAME.replace(".", "/") + ";"); + + private final static org.objectweb.asm.commons.Method CONSTRUCTOR = org.objectweb.asm.commons.Method.getMethod( + "void (org.elasticsearch.plan.a.Definition, java.lang.String, java.lang.String)"); + private final static org.objectweb.asm.commons.Method EXECUTE = org.objectweb.asm.commons.Method.getMethod( + "java.lang.Object execute(java.util.Map)"); + private final static String SIGNATURE = "(Ljava/util/Map;)Ljava/lang/Object;"; + + private final static org.objectweb.asm.Type DEFINITION_TYPE = org.objectweb.asm.Type.getType(Definition.class); + + private final static org.objectweb.asm.commons.Method DEF_METHOD_CALL = 
org.objectweb.asm.commons.Method.getMethod( + "java.lang.Object methodCall(java.lang.Object, java.lang.String, " + + "org.elasticsearch.plan.a.Definition, java.lang.Object[], boolean[])"); + private final static org.objectweb.asm.commons.Method DEF_ARRAY_STORE = org.objectweb.asm.commons.Method.getMethod( + "void arrayStore(java.lang.Object, java.lang.Object, java.lang.Object, " + + "org.elasticsearch.plan.a.Definition, boolean, boolean)"); + private final static org.objectweb.asm.commons.Method DEF_ARRAY_LOAD = org.objectweb.asm.commons.Method.getMethod( + "java.lang.Object arrayLoad(java.lang.Object, java.lang.Object, " + + "org.elasticsearch.plan.a.Definition, boolean)"); + private final static org.objectweb.asm.commons.Method DEF_FIELD_STORE = org.objectweb.asm.commons.Method.getMethod( + "void fieldStore(java.lang.Object, java.lang.Object, java.lang.String, " + + "org.elasticsearch.plan.a.Definition, boolean)"); + private final static org.objectweb.asm.commons.Method DEF_FIELD_LOAD = org.objectweb.asm.commons.Method.getMethod( + "java.lang.Object fieldLoad(java.lang.Object, java.lang.String, org.elasticsearch.plan.a.Definition)"); + + private final static org.objectweb.asm.commons.Method DEF_NOT_CALL = org.objectweb.asm.commons.Method.getMethod( + "java.lang.Object not(java.lang.Object)"); + private final static org.objectweb.asm.commons.Method DEF_NEG_CALL = org.objectweb.asm.commons.Method.getMethod( + "java.lang.Object neg(java.lang.Object)"); + private final static org.objectweb.asm.commons.Method DEF_MUL_CALL = org.objectweb.asm.commons.Method.getMethod( + "java.lang.Object mul(java.lang.Object, java.lang.Object)"); + private final static org.objectweb.asm.commons.Method DEF_DIV_CALL = org.objectweb.asm.commons.Method.getMethod( + "java.lang.Object div(java.lang.Object, java.lang.Object)"); + private final static org.objectweb.asm.commons.Method DEF_REM_CALL = org.objectweb.asm.commons.Method.getMethod( + "java.lang.Object rem(java.lang.Object, 
java.lang.Object)"); + private final static org.objectweb.asm.commons.Method DEF_ADD_CALL = org.objectweb.asm.commons.Method.getMethod( + "java.lang.Object add(java.lang.Object, java.lang.Object)"); + private final static org.objectweb.asm.commons.Method DEF_SUB_CALL = org.objectweb.asm.commons.Method.getMethod( + "java.lang.Object sub(java.lang.Object, java.lang.Object)"); + private final static org.objectweb.asm.commons.Method DEF_LSH_CALL = org.objectweb.asm.commons.Method.getMethod( + "java.lang.Object lsh(java.lang.Object, java.lang.Object)"); + private final static org.objectweb.asm.commons.Method DEF_RSH_CALL = org.objectweb.asm.commons.Method.getMethod( + "java.lang.Object rsh(java.lang.Object, java.lang.Object)"); + private final static org.objectweb.asm.commons.Method DEF_USH_CALL = org.objectweb.asm.commons.Method.getMethod( + "java.lang.Object ush(java.lang.Object, java.lang.Object)"); + private final static org.objectweb.asm.commons.Method DEF_AND_CALL = org.objectweb.asm.commons.Method.getMethod( + "java.lang.Object and(java.lang.Object, java.lang.Object)"); + private final static org.objectweb.asm.commons.Method DEF_XOR_CALL = org.objectweb.asm.commons.Method.getMethod( + "java.lang.Object xor(java.lang.Object, java.lang.Object)"); + private final static org.objectweb.asm.commons.Method DEF_OR_CALL = org.objectweb.asm.commons.Method.getMethod( + "java.lang.Object or(java.lang.Object, java.lang.Object)"); + private final static org.objectweb.asm.commons.Method DEF_EQ_CALL = org.objectweb.asm.commons.Method.getMethod( + "boolean eq(java.lang.Object, java.lang.Object)"); + private final static org.objectweb.asm.commons.Method DEF_LT_CALL = org.objectweb.asm.commons.Method.getMethod( + "boolean lt(java.lang.Object, java.lang.Object)"); + private final static org.objectweb.asm.commons.Method DEF_LTE_CALL = org.objectweb.asm.commons.Method.getMethod( + "boolean lte(java.lang.Object, java.lang.Object)"); + private final static 
org.objectweb.asm.commons.Method DEF_GT_CALL = org.objectweb.asm.commons.Method.getMethod( + "boolean gt(java.lang.Object, java.lang.Object)"); + private final static org.objectweb.asm.commons.Method DEF_GTE_CALL = org.objectweb.asm.commons.Method.getMethod( + "boolean gte(java.lang.Object, java.lang.Object)"); + + private final static org.objectweb.asm.Type STRINGBUILDER_TYPE = org.objectweb.asm.Type.getType(StringBuilder.class); + + private final static org.objectweb.asm.commons.Method STRINGBUILDER_CONSTRUCTOR = + org.objectweb.asm.commons.Method.getMethod("void ()"); + private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_BOOLEAN = + org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(boolean)"); + private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_CHAR = + org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(char)"); + private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_INT = + org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(int)"); + private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_LONG = + org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(long)"); + private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_FLOAT = + org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(float)"); + private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_DOUBLE = + org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(double)"); + private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_STRING = + org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(java.lang.String)"); + private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_OBJECT = + org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(java.lang.Object)"); + private final static 
org.objectweb.asm.commons.Method STRINGBUILDER_TOSTRING = + org.objectweb.asm.commons.Method.getMethod("java.lang.String toString()"); + + private final static org.objectweb.asm.commons.Method TOINTEXACT_LONG = + org.objectweb.asm.commons.Method.getMethod("int toIntExact(long)"); + private final static org.objectweb.asm.commons.Method NEGATEEXACT_INT = + org.objectweb.asm.commons.Method.getMethod("int negateExact(int)"); + private final static org.objectweb.asm.commons.Method NEGATEEXACT_LONG = + org.objectweb.asm.commons.Method.getMethod("long negateExact(long)"); + private final static org.objectweb.asm.commons.Method MULEXACT_INT = + org.objectweb.asm.commons.Method.getMethod("int multiplyExact(int, int)"); + private final static org.objectweb.asm.commons.Method MULEXACT_LONG = + org.objectweb.asm.commons.Method.getMethod("long multiplyExact(long, long)"); + private final static org.objectweb.asm.commons.Method ADDEXACT_INT = + org.objectweb.asm.commons.Method.getMethod("int addExact(int, int)"); + private final static org.objectweb.asm.commons.Method ADDEXACT_LONG = + org.objectweb.asm.commons.Method.getMethod("long addExact(long, long)"); + private final static org.objectweb.asm.commons.Method SUBEXACT_INT = + org.objectweb.asm.commons.Method.getMethod("int subtractExact(int, int)"); + private final static org.objectweb.asm.commons.Method SUBEXACT_LONG = + org.objectweb.asm.commons.Method.getMethod("long subtractExact(long, long)"); + + private final static org.objectweb.asm.commons.Method CHECKEQUALS = + org.objectweb.asm.commons.Method.getMethod("boolean checkEquals(java.lang.Object, java.lang.Object)"); + private final static org.objectweb.asm.commons.Method TOBYTEEXACT_INT = + org.objectweb.asm.commons.Method.getMethod("byte toByteExact(int)"); + private final static org.objectweb.asm.commons.Method TOBYTEEXACT_LONG = + org.objectweb.asm.commons.Method.getMethod("byte toByteExact(long)"); + private final static org.objectweb.asm.commons.Method 
TOBYTEWOOVERFLOW_FLOAT = + org.objectweb.asm.commons.Method.getMethod("byte toByteWithoutOverflow(float)"); + private final static org.objectweb.asm.commons.Method TOBYTEWOOVERFLOW_DOUBLE = + org.objectweb.asm.commons.Method.getMethod("byte toByteWithoutOverflow(double)"); + private final static org.objectweb.asm.commons.Method TOSHORTEXACT_INT = + org.objectweb.asm.commons.Method.getMethod("short toShortExact(int)"); + private final static org.objectweb.asm.commons.Method TOSHORTEXACT_LONG = + org.objectweb.asm.commons.Method.getMethod("short toShortExact(long)"); + private final static org.objectweb.asm.commons.Method TOSHORTWOOVERFLOW_FLOAT = + org.objectweb.asm.commons.Method.getMethod("short toShortWithoutOverflow(float)"); + private final static org.objectweb.asm.commons.Method TOSHORTWOOVERFLOW_DOUBLE = + org.objectweb.asm.commons.Method.getMethod("short toShortWithoutOverflow(double)"); + private final static org.objectweb.asm.commons.Method TOCHAREXACT_INT = + org.objectweb.asm.commons.Method.getMethod("char toCharExact(int)"); + private final static org.objectweb.asm.commons.Method TOCHAREXACT_LONG = + org.objectweb.asm.commons.Method.getMethod("char toCharExact(long)"); + private final static org.objectweb.asm.commons.Method TOCHARWOOVERFLOW_FLOAT = + org.objectweb.asm.commons.Method.getMethod("char toCharWithoutOverflow(float)"); + private final static org.objectweb.asm.commons.Method TOCHARWOOVERFLOW_DOUBLE = + org.objectweb.asm.commons.Method.getMethod("char toCharWithoutOverflow(double)"); + private final static org.objectweb.asm.commons.Method TOINTWOOVERFLOW_FLOAT = + org.objectweb.asm.commons.Method.getMethod("int toIntWithoutOverflow(float)"); + private final static org.objectweb.asm.commons.Method TOINTWOOVERFLOW_DOUBLE = + org.objectweb.asm.commons.Method.getMethod("int toIntWithoutOverflow(double)"); + private final static org.objectweb.asm.commons.Method TOLONGWOOVERFLOW_FLOAT = + org.objectweb.asm.commons.Method.getMethod("long 
toLongExactWithoutOverflow(float)"); + private final static org.objectweb.asm.commons.Method TOLONGWOOVERFLOW_DOUBLE = + org.objectweb.asm.commons.Method.getMethod("long toLongExactWithoutOverflow(double)"); + private final static org.objectweb.asm.commons.Method TOFLOATWOOVERFLOW_DOUBLE = + org.objectweb.asm.commons.Method.getMethod("float toFloatWithoutOverflow(double)"); + private final static org.objectweb.asm.commons.Method MULWOOVERLOW_FLOAT = + org.objectweb.asm.commons.Method.getMethod("float multiplyWithoutOverflow(float, float)"); + private final static org.objectweb.asm.commons.Method MULWOOVERLOW_DOUBLE = + org.objectweb.asm.commons.Method.getMethod("double multiplyWithoutOverflow(double, double)"); + private final static org.objectweb.asm.commons.Method DIVWOOVERLOW_INT = + org.objectweb.asm.commons.Method.getMethod("int divideWithoutOverflow(int, int)"); + private final static org.objectweb.asm.commons.Method DIVWOOVERLOW_LONG = + org.objectweb.asm.commons.Method.getMethod("long divideWithoutOverflow(long, long)"); + private final static org.objectweb.asm.commons.Method DIVWOOVERLOW_FLOAT = + org.objectweb.asm.commons.Method.getMethod("float divideWithoutOverflow(float, float)"); + private final static org.objectweb.asm.commons.Method DIVWOOVERLOW_DOUBLE = + org.objectweb.asm.commons.Method.getMethod("double divideWithoutOverflow(double, double)"); + private final static org.objectweb.asm.commons.Method REMWOOVERLOW_FLOAT = + org.objectweb.asm.commons.Method.getMethod("float remainderWithoutOverflow(float, float)"); + private final static org.objectweb.asm.commons.Method REMWOOVERLOW_DOUBLE = + org.objectweb.asm.commons.Method.getMethod("double remainderWithoutOverflow(double, double)"); + private final static org.objectweb.asm.commons.Method ADDWOOVERLOW_FLOAT = + org.objectweb.asm.commons.Method.getMethod("float addWithoutOverflow(float, float)"); + private final static org.objectweb.asm.commons.Method ADDWOOVERLOW_DOUBLE = + 
org.objectweb.asm.commons.Method.getMethod("double addWithoutOverflow(double, double)"); + private final static org.objectweb.asm.commons.Method SUBWOOVERLOW_FLOAT = + org.objectweb.asm.commons.Method.getMethod("float subtractWithoutOverflow(float, float)"); + private final static org.objectweb.asm.commons.Method SUBWOOVERLOW_DOUBLE = + org.objectweb.asm.commons.Method.getMethod("double subtractWithoutOverflow(double, double)"); + + static byte[] write(Adapter adapter) { + Writer writer = new Writer(adapter); + + return writer.getBytes(); + } + + private final Adapter adapter; + private final Definition definition; + private final ParseTree root; + private final String source; + private final CompilerSettings settings; + + private final Map branches; + private final Deque jumps; + private final Set strings; + + private ClassWriter writer; + private GeneratorAdapter execute; + + private Writer(final Adapter adapter) { + this.adapter = adapter; + definition = adapter.definition; + root = adapter.root; + source = adapter.source; + settings = adapter.settings; + + branches = new HashMap<>(); + jumps = new ArrayDeque<>(); + strings = new HashSet<>(); + + writeBegin(); + writeConstructor(); + writeExecute(); + writeEnd(); + } + + private Branch markBranch(final ParserRuleContext source, final ParserRuleContext... nodes) { + final Branch branch = new Branch(source); + + for (final ParserRuleContext node : nodes) { + branches.put(node, branch); + } + + return branch; + } + + private void copyBranch(final Branch branch, final ParserRuleContext... 
nodes) { + for (final ParserRuleContext node : nodes) { + branches.put(node, branch); + } + } + + private Branch getBranch(final ParserRuleContext source) { + return branches.get(source); + } + + private void writeBegin() { + final int compute = ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS; + final int version = Opcodes.V1_7; + final int access = Opcodes.ACC_PUBLIC | Opcodes.ACC_SUPER | Opcodes.ACC_FINAL | Opcodes.ACC_SYNTHETIC; + final String base = BASE_CLASS_TYPE.getInternalName(); + final String name = CLASS_TYPE.getInternalName(); + + writer = new ClassWriter(compute); + writer.visit(version, access, name, null, base, null); + writer.visitSource(source, null); + } + + private void writeConstructor() { + final int access = Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC; + final GeneratorAdapter constructor = new GeneratorAdapter(access, CONSTRUCTOR, null, null, writer); + constructor.loadThis(); + constructor.loadArgs(); + constructor.invokeConstructor(org.objectweb.asm.Type.getType(Executable.class), CONSTRUCTOR); + constructor.returnValue(); + constructor.endMethod(); + } + + private void writeExecute() { + final int access = Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC; + execute = new GeneratorAdapter(access, EXECUTE, SIGNATURE, null, writer); + visit(root); + execute.endMethod(); + } + + @Override + public Void visitSource(final SourceContext ctx) { + final StatementMetadata sourcesmd = adapter.getStatementMetadata(ctx); + + for (final StatementContext sctx : ctx.statement()) { + visit(sctx); + } + + if (!sourcesmd.allReturn) { + execute.visitInsn(Opcodes.ACONST_NULL); + execute.returnValue(); + } + + return null; + } + + @Override + public Void visitIf(final IfContext ctx) { + final ExpressionContext exprctx = ctx.expression(); + final boolean els = ctx.ELSE() != null; + final Branch branch = markBranch(ctx, exprctx); + branch.end = new Label(); + branch.fals = els ? 
new Label() : branch.end; + + visit(exprctx); + + final BlockContext blockctx0 = ctx.block(0); + final StatementMetadata blockmd0 = adapter.getStatementMetadata(blockctx0); + visit(blockctx0); + + if (els) { + if (!blockmd0.allExit) { + execute.goTo(branch.end); + } + + execute.mark(branch.fals); + visit(ctx.block(1)); + } + + execute.mark(branch.end); + + return null; + } + + @Override + public Void visitWhile(final WhileContext ctx) { + final ExpressionContext exprctx = ctx.expression(); + final Branch branch = markBranch(ctx, exprctx); + branch.begin = new Label(); + branch.end = new Label(); + branch.fals = branch.end; + + jumps.push(branch); + execute.mark(branch.begin); + visit(exprctx); + + final BlockContext blockctx = ctx.block(); + boolean allexit = false; + + if (blockctx != null) { + StatementMetadata blocksmd = adapter.getStatementMetadata(blockctx); + allexit = blocksmd.allExit; + visit(blockctx); + } + + if (!allexit) { + execute.goTo(branch.begin); + } + + execute.mark(branch.end); + jumps.pop(); + + return null; + } + + @Override + public Void visitDo(final DoContext ctx) { + final ExpressionContext exprctx = ctx.expression(); + final Branch branch = markBranch(ctx, exprctx); + branch.begin = new Label(); + branch.end = new Label(); + branch.fals = branch.end; + + jumps.push(branch); + execute.mark(branch.begin); + + final BlockContext bctx = ctx.block(); + final StatementMetadata blocksmd = adapter.getStatementMetadata(bctx); + visit(bctx); + + visit(exprctx); + + if (!blocksmd.allExit) { + execute.goTo(branch.begin); + } + + execute.mark(branch.end); + jumps.pop(); + + return null; + } + + @Override + public Void visitFor(final ForContext ctx) { + final ExpressionContext exprctx = ctx.expression(); + final AfterthoughtContext atctx = ctx.afterthought(); + final Branch branch = markBranch(ctx, exprctx); + final Label start = new Label(); + branch.begin = atctx == null ? 
start : new Label(); + branch.end = new Label(); + branch.fals = branch.end; + + jumps.push(branch); + + if (ctx.initializer() != null) { + visit(ctx.initializer()); + } + + execute.mark(start); + + if (exprctx != null) { + visit(exprctx); + } + + final BlockContext blockctx = ctx.block(); + boolean allexit = false; + + if (blockctx != null) { + StatementMetadata blocksmd = adapter.getStatementMetadata(blockctx); + allexit = blocksmd.allExit; + visit(blockctx); + } + + if (atctx != null) { + execute.mark(branch.begin); + visit(atctx); + } + + if (atctx != null || !allexit) { + execute.goTo(start); + } + + execute.mark(branch.end); + jumps.pop(); + + return null; + } + + @Override + public Void visitDecl(final DeclContext ctx) { + visit(ctx.declaration()); + + return null; + } + + @Override + public Void visitContinue(final ContinueContext ctx) { + final Branch jump = jumps.peek(); + execute.goTo(jump.begin); + + return null; + } + + @Override + public Void visitBreak(final BreakContext ctx) { + final Branch jump = jumps.peek(); + execute.goTo(jump.end); + + return null; + } + + @Override + public Void visitReturn(final ReturnContext ctx) { + visit(ctx.expression()); + execute.returnValue(); + + return null; + } + + @Override + public Void visitExpr(final ExprContext ctx) { + final StatementMetadata exprsmd = adapter.getStatementMetadata(ctx); + final ExpressionContext exprctx = ctx.expression(); + final ExpressionMetadata expremd = adapter.getExpressionMetadata(exprctx); + visit(exprctx); + + if (exprsmd.allReturn) { + execute.returnValue(); + } else { + writePop(expremd.to.type.getSize()); + } + + return null; + } + + @Override + public Void visitMultiple(final MultipleContext ctx) { + for (final StatementContext sctx : ctx.statement()) { + visit(sctx); + } + + return null; + } + + @Override + public Void visitSingle(final SingleContext ctx) { + visit(ctx.statement()); + + return null; + } + + @Override + public Void visitEmpty(final EmptyContext ctx) { + throw 
new UnsupportedOperationException(error(ctx) + "Unexpected writer state."); + } + + @Override + public Void visitInitializer(InitializerContext ctx) { + final DeclarationContext declctx = ctx.declaration(); + final ExpressionContext exprctx = ctx.expression(); + + if (declctx != null) { + visit(declctx); + } else if (exprctx != null) { + visit(exprctx); + } else { + throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + } + + return null; + } + + @Override + public Void visitAfterthought(AfterthoughtContext ctx) { + visit(ctx.expression()); + + return null; + } + + @Override + public Void visitDeclaration(DeclarationContext ctx) { + for (final DeclvarContext declctx : ctx.declvar()) { + visit(declctx); + } + + return null; + } + + @Override + public Void visitDecltype(final DecltypeContext ctx) { + throw new UnsupportedOperationException(error(ctx) + "Unexpected writer state."); + } + + @Override + public Void visitDeclvar(final DeclvarContext ctx) { + final ExpressionMetadata declvaremd = adapter.getExpressionMetadata(ctx); + final org.objectweb.asm.Type type = declvaremd.to.type; + final Sort sort = declvaremd.to.sort; + final int slot = (int)declvaremd.postConst; + + final ExpressionContext exprctx = ctx.expression(); + final boolean initialize = exprctx == null; + + if (!initialize) { + visit(exprctx); + } + + switch (sort) { + case VOID: throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + case BOOL: + case BYTE: + case SHORT: + case CHAR: + case INT: if (initialize) execute.push(0); break; + case LONG: if (initialize) execute.push(0L); break; + case FLOAT: if (initialize) execute.push(0.0F); break; + case DOUBLE: if (initialize) execute.push(0.0); break; + default: if (initialize) execute.visitInsn(Opcodes.ACONST_NULL); + } + + execute.visitVarInsn(type.getOpcode(Opcodes.ISTORE), slot); + + return null; + } + + @Override + public Void visitPrecedence(final PrecedenceContext ctx) { + throw new 
UnsupportedOperationException(error(ctx) + "Unexpected writer state."); + } + + @Override + public Void visitNumeric(final NumericContext ctx) { + final ExpressionMetadata numericemd = adapter.getExpressionMetadata(ctx); + final Object postConst = numericemd.postConst; + + if (postConst == null) { + writeNumeric(ctx, numericemd.preConst); + checkWriteCast(numericemd); + } else { + writeConstant(ctx, postConst); + } + + checkWriteBranch(ctx); + + return null; + } + + @Override + public Void visitChar(final CharContext ctx) { + final ExpressionMetadata charemd = adapter.getExpressionMetadata(ctx); + final Object postConst = charemd.postConst; + + if (postConst == null) { + writeNumeric(ctx, (int)(char)charemd.preConst); + checkWriteCast(charemd); + } else { + writeConstant(ctx, postConst); + } + + checkWriteBranch(ctx); + + return null; + } + + @Override + public Void visitTrue(final TrueContext ctx) { + final ExpressionMetadata trueemd = adapter.getExpressionMetadata(ctx); + final Object postConst = trueemd.postConst; + final Branch branch = getBranch(ctx); + + if (branch == null) { + if (postConst == null) { + writeBoolean(ctx, true); + checkWriteCast(trueemd); + } else { + writeConstant(ctx, postConst); + } + } else if (branch.tru != null) { + execute.goTo(branch.tru); + } + + return null; + } + + @Override + public Void visitFalse(final FalseContext ctx) { + final ExpressionMetadata falseemd = adapter.getExpressionMetadata(ctx); + final Object postConst = falseemd.postConst; + final Branch branch = getBranch(ctx); + + if (branch == null) { + if (postConst == null) { + writeBoolean(ctx, false); + checkWriteCast(falseemd); + } else { + writeConstant(ctx, postConst); + } + } else if (branch.fals != null) { + execute.goTo(branch.fals); + } + + return null; + } + + @Override + public Void visitNull(final NullContext ctx) { + final ExpressionMetadata nullemd = adapter.getExpressionMetadata(ctx); + + execute.visitInsn(Opcodes.ACONST_NULL); + checkWriteCast(nullemd); + 
checkWriteBranch(ctx); + + return null; + } + + @Override + public Void visitExternal(final ExternalContext ctx) { + final ExpressionMetadata expremd = adapter.getExpressionMetadata(ctx); + visit(ctx.extstart()); + checkWriteCast(expremd); + checkWriteBranch(ctx); + + return null; + } + + + @Override + public Void visitPostinc(final PostincContext ctx) { + final ExpressionMetadata expremd = adapter.getExpressionMetadata(ctx); + visit(ctx.extstart()); + checkWriteCast(expremd); + checkWriteBranch(ctx); + + return null; + } + + @Override + public Void visitPreinc(final PreincContext ctx) { + final ExpressionMetadata expremd = adapter.getExpressionMetadata(ctx); + visit(ctx.extstart()); + checkWriteCast(expremd); + checkWriteBranch(ctx); + + return null; + } + + @Override + public Void visitUnary(final UnaryContext ctx) { + final ExpressionMetadata unaryemd = adapter.getExpressionMetadata(ctx); + final Object postConst = unaryemd.postConst; + final Object preConst = unaryemd.preConst; + final Branch branch = getBranch(ctx); + + if (postConst != null) { + if (ctx.BOOLNOT() != null) { + if (branch == null) { + writeConstant(ctx, postConst); + } else { + if ((boolean)postConst && branch.tru != null) { + execute.goTo(branch.tru); + } else if (!(boolean)postConst && branch.fals != null) { + execute.goTo(branch.fals); + } + } + } else { + writeConstant(ctx, postConst); + checkWriteBranch(ctx); + } + } else if (preConst != null) { + if (branch == null) { + writeConstant(ctx, preConst); + checkWriteCast(unaryemd); + } else { + throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + } + } else { + final ExpressionContext exprctx = ctx.expression(); + + if (ctx.BOOLNOT() != null) { + final Branch local = markBranch(ctx, exprctx); + + if (branch == null) { + local.fals = new Label(); + final Label aend = new Label(); + + visit(exprctx); + + execute.push(false); + execute.goTo(aend); + execute.mark(local.fals); + execute.push(true); + execute.mark(aend); + + 
checkWriteCast(unaryemd); + } else { + local.tru = branch.fals; + local.fals = branch.tru; + + visit(exprctx); + } + } else { + final org.objectweb.asm.Type type = unaryemd.from.type; + final Sort sort = unaryemd.from.sort; + + visit(exprctx); + + if (ctx.BWNOT() != null) { + if (sort == Sort.DEF) { + execute.invokeStatic(definition.defobjType.type, DEF_NOT_CALL); + } else { + if (sort == Sort.INT) { + writeConstant(ctx, -1); + } else if (sort == Sort.LONG) { + writeConstant(ctx, -1L); + } else { + throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + } + + execute.math(GeneratorAdapter.XOR, type); + } + } else if (ctx.SUB() != null) { + if (sort == Sort.DEF) { + execute.invokeStatic(definition.defobjType.type, DEF_NEG_CALL); + } else { + if (settings.getNumericOverflow()) { + execute.math(GeneratorAdapter.NEG, type); + } else { + if (sort == Sort.INT) { + execute.invokeStatic(definition.mathType.type, NEGATEEXACT_INT); + } else if (sort == Sort.LONG) { + execute.invokeStatic(definition.mathType.type, NEGATEEXACT_LONG); + } else { + throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + } + } + } + } else if (ctx.ADD() == null) { + throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + } + + checkWriteCast(unaryemd); + checkWriteBranch(ctx); + } + } + + return null; + } + + @Override + public Void visitCast(final CastContext ctx) { + final ExpressionMetadata castemd = adapter.getExpressionMetadata(ctx); + final Object postConst = castemd.postConst; + + if (postConst == null) { + visit(ctx.expression()); + checkWriteCast(castemd); + } else { + writeConstant(ctx, postConst); + } + + checkWriteBranch(ctx); + + return null; + } + + @Override + public Void visitBinary(final BinaryContext ctx) { + final ExpressionMetadata binaryemd = adapter.getExpressionMetadata(ctx); + final Object postConst = binaryemd.postConst; + final Object preConst = binaryemd.preConst; + final Branch branch = getBranch(ctx); + + if 
(postConst != null) { + writeConstant(ctx, postConst); + } else if (preConst != null) { + if (branch == null) { + writeConstant(ctx, preConst); + checkWriteCast(binaryemd); + } else { + throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + } + } else if (binaryemd.from.sort == Sort.STRING) { + final boolean marked = strings.contains(ctx); + + if (!marked) { + writeNewStrings(); + } + + final ExpressionContext exprctx0 = ctx.expression(0); + final ExpressionMetadata expremd0 = adapter.getExpressionMetadata(exprctx0); + strings.add(exprctx0); + visit(exprctx0); + + if (strings.contains(exprctx0)) { + writeAppendStrings(expremd0.from.sort); + strings.remove(exprctx0); + } + + final ExpressionContext exprctx1 = ctx.expression(1); + final ExpressionMetadata expremd1 = adapter.getExpressionMetadata(exprctx1); + strings.add(exprctx1); + visit(exprctx1); + + if (strings.contains(exprctx1)) { + writeAppendStrings(expremd1.from.sort); + strings.remove(exprctx1); + } + + if (marked) { + strings.remove(ctx); + } else { + writeToStrings(); + } + + checkWriteCast(binaryemd); + } else { + final ExpressionContext exprctx0 = ctx.expression(0); + final ExpressionContext exprctx1 = ctx.expression(1); + + visit(exprctx0); + visit(exprctx1); + + final Type type = binaryemd.from; + + if (ctx.MUL() != null) writeBinaryInstruction(ctx, type, MUL); + else if (ctx.DIV() != null) writeBinaryInstruction(ctx, type, DIV); + else if (ctx.REM() != null) writeBinaryInstruction(ctx, type, REM); + else if (ctx.ADD() != null) writeBinaryInstruction(ctx, type, ADD); + else if (ctx.SUB() != null) writeBinaryInstruction(ctx, type, SUB); + else if (ctx.LSH() != null) writeBinaryInstruction(ctx, type, LSH); + else if (ctx.USH() != null) writeBinaryInstruction(ctx, type, USH); + else if (ctx.RSH() != null) writeBinaryInstruction(ctx, type, RSH); + else if (ctx.BWAND() != null) writeBinaryInstruction(ctx, type, BWAND); + else if (ctx.BWXOR() != null) writeBinaryInstruction(ctx, type, 
BWXOR); + else if (ctx.BWOR() != null) writeBinaryInstruction(ctx, type, BWOR); + else { + throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + } + + checkWriteCast(binaryemd); + } + + checkWriteBranch(ctx); + + return null; + } + + @Override + public Void visitComp(final CompContext ctx) { + final ExpressionMetadata compemd = adapter.getExpressionMetadata(ctx); + final Object postConst = compemd.postConst; + final Object preConst = compemd.preConst; + final Branch branch = getBranch(ctx); + + if (postConst != null) { + if (branch == null) { + writeConstant(ctx, postConst); + } else { + if ((boolean)postConst && branch.tru != null) { + execute.mark(branch.tru); + } else if (!(boolean)postConst && branch.fals != null) { + execute.mark(branch.fals); + } + } + } else if (preConst != null) { + if (branch == null) { + writeConstant(ctx, preConst); + checkWriteCast(compemd); + } else { + throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + } + } else { + final ExpressionContext exprctx0 = ctx.expression(0); + final ExpressionMetadata expremd0 = adapter.getExpressionMetadata(exprctx0); + + final ExpressionContext exprctx1 = ctx.expression(1); + final ExpressionMetadata expremd1 = adapter.getExpressionMetadata(exprctx1); + final org.objectweb.asm.Type type = expremd1.to.type; + final Sort sort1 = expremd1.to.sort; + + visit(exprctx0); + + if (!expremd1.isNull) { + visit(exprctx1); + } + + final boolean tru = branch != null && branch.tru != null; + final boolean fals = branch != null && branch.fals != null; + final Label jump = tru ? branch.tru : fals ? 
branch.fals : new Label(); + final Label end = new Label(); + + final boolean eq = (ctx.EQ() != null || ctx.EQR() != null) && (tru || !fals) || + (ctx.NE() != null || ctx.NER() != null) && fals; + final boolean ne = (ctx.NE() != null || ctx.NER() != null) && (tru || !fals) || + (ctx.EQ() != null || ctx.EQR() != null) && fals; + final boolean lt = ctx.LT() != null && (tru || !fals) || ctx.GTE() != null && fals; + final boolean lte = ctx.LTE() != null && (tru || !fals) || ctx.GT() != null && fals; + final boolean gt = ctx.GT() != null && (tru || !fals) || ctx.LTE() != null && fals; + final boolean gte = ctx.GTE() != null && (tru || !fals) || ctx.LT() != null && fals; + + boolean writejump = true; + + switch (sort1) { + case VOID: + case BYTE: + case SHORT: + case CHAR: + throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + case BOOL: + if (eq) execute.ifZCmp(GeneratorAdapter.EQ, jump); + else if (ne) execute.ifZCmp(GeneratorAdapter.NE, jump); + else { + throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + } + + break; + case INT: + case LONG: + case FLOAT: + case DOUBLE: + if (eq) execute.ifCmp(type, GeneratorAdapter.EQ, jump); + else if (ne) execute.ifCmp(type, GeneratorAdapter.NE, jump); + else if (lt) execute.ifCmp(type, GeneratorAdapter.LT, jump); + else if (lte) execute.ifCmp(type, GeneratorAdapter.LE, jump); + else if (gt) execute.ifCmp(type, GeneratorAdapter.GT, jump); + else if (gte) execute.ifCmp(type, GeneratorAdapter.GE, jump); + else { + throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + } + + break; + case DEF: + if (eq) { + if (expremd1.isNull) { + execute.ifNull(jump); + } else if (!expremd0.isNull && ctx.EQ() != null) { + execute.invokeStatic(definition.defobjType.type, DEF_EQ_CALL); + } else { + execute.ifCmp(type, GeneratorAdapter.EQ, jump); + } + } else if (ne) { + if (expremd1.isNull) { + execute.ifNonNull(jump); + } else if (!expremd0.isNull && ctx.NE() != null) { + 
execute.invokeStatic(definition.defobjType.type, DEF_EQ_CALL); + execute.ifZCmp(GeneratorAdapter.EQ, jump); + } else { + execute.ifCmp(type, GeneratorAdapter.NE, jump); + } + } else if (lt) { + execute.invokeStatic(definition.defobjType.type, DEF_LT_CALL); + } else if (lte) { + execute.invokeStatic(definition.defobjType.type, DEF_LTE_CALL); + } else if (gt) { + execute.invokeStatic(definition.defobjType.type, DEF_GT_CALL); + } else if (gte) { + execute.invokeStatic(definition.defobjType.type, DEF_GTE_CALL); + } else { + throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + } + + writejump = expremd1.isNull || ne || ctx.EQR() != null; + + if (branch != null && !writejump) { + execute.ifZCmp(GeneratorAdapter.NE, jump); + } + + break; + default: + if (eq) { + if (expremd1.isNull) { + execute.ifNull(jump); + } else if (ctx.EQ() != null) { + execute.invokeStatic(definition.utilityType.type, CHECKEQUALS); + + if (branch != null) { + execute.ifZCmp(GeneratorAdapter.NE, jump); + } + + writejump = false; + } else { + execute.ifCmp(type, GeneratorAdapter.EQ, jump); + } + } else if (ne) { + if (expremd1.isNull) { + execute.ifNonNull(jump); + } else if (ctx.NE() != null) { + execute.invokeStatic(definition.utilityType.type, CHECKEQUALS); + execute.ifZCmp(GeneratorAdapter.EQ, jump); + } else { + execute.ifCmp(type, GeneratorAdapter.NE, jump); + } + } else { + throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + } + } + + if (branch == null) { + if (writejump) { + execute.push(false); + execute.goTo(end); + execute.mark(jump); + execute.push(true); + execute.mark(end); + } + + checkWriteCast(compemd); + } + } + + return null; + } + + @Override + public Void visitBool(final BoolContext ctx) { + final ExpressionMetadata boolemd = adapter.getExpressionMetadata(ctx); + final Object postConst = boolemd.postConst; + final Object preConst = boolemd.preConst; + final Branch branch = getBranch(ctx); + + if (postConst != null) { + if (branch == 
null) { + writeConstant(ctx, postConst); + } else { + if ((boolean)postConst && branch.tru != null) { + execute.mark(branch.tru); + } else if (!(boolean)postConst && branch.fals != null) { + execute.mark(branch.fals); + } + } + } else if (preConst != null) { + if (branch == null) { + writeConstant(ctx, preConst); + checkWriteCast(boolemd); + } else { + throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + } + } else { + final ExpressionContext exprctx0 = ctx.expression(0); + final ExpressionContext exprctx1 = ctx.expression(1); + + if (branch == null) { + if (ctx.BOOLAND() != null) { + final Branch local = markBranch(ctx, exprctx0, exprctx1); + local.fals = new Label(); + final Label end = new Label(); + + visit(exprctx0); + visit(exprctx1); + + execute.push(true); + execute.goTo(end); + execute.mark(local.fals); + execute.push(false); + execute.mark(end); + } else if (ctx.BOOLOR() != null) { + final Branch branch0 = markBranch(ctx, exprctx0); + branch0.tru = new Label(); + final Branch branch1 = markBranch(ctx, exprctx1); + branch1.fals = new Label(); + final Label aend = new Label(); + + visit(exprctx0); + visit(exprctx1); + + execute.mark(branch0.tru); + execute.push(true); + execute.goTo(aend); + execute.mark(branch1.fals); + execute.push(false); + execute.mark(aend); + } else { + throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + } + + checkWriteCast(boolemd); + } else { + if (ctx.BOOLAND() != null) { + final Branch branch0 = markBranch(ctx, exprctx0); + branch0.fals = branch.fals == null ? new Label() : branch.fals; + final Branch branch1 = markBranch(ctx, exprctx1); + branch1.tru = branch.tru; + branch1.fals = branch.fals; + + visit(exprctx0); + visit(exprctx1); + + if (branch.fals == null) { + execute.mark(branch0.fals); + } + } else if (ctx.BOOLOR() != null) { + final Branch branch0 = markBranch(ctx, exprctx0); + branch0.tru = branch.tru == null ? 
new Label() : branch.tru; + final Branch branch1 = markBranch(ctx, exprctx1); + branch1.tru = branch.tru; + branch1.fals = branch.fals; + + visit(exprctx0); + visit(exprctx1); + + if (branch.tru == null) { + execute.mark(branch0.tru); + } + } else { + throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + } + } + } + + return null; + } + + @Override + public Void visitConditional(final ConditionalContext ctx) { + final ExpressionMetadata condemd = adapter.getExpressionMetadata(ctx); + final Branch branch = getBranch(ctx); + + final ExpressionContext expr0 = ctx.expression(0); + final ExpressionContext expr1 = ctx.expression(1); + final ExpressionContext expr2 = ctx.expression(2); + + final Branch local = markBranch(ctx, expr0); + local.fals = new Label(); + local.end = new Label(); + + if (branch != null) { + copyBranch(branch, expr1, expr2); + } + + visit(expr0); + visit(expr1); + execute.goTo(local.end); + execute.mark(local.fals); + visit(expr2); + execute.mark(local.end); + + if (branch == null) { + checkWriteCast(condemd); + } + + return null; + } + + @Override + public Void visitAssignment(final AssignmentContext ctx) { + final ExpressionMetadata expremd = adapter.getExpressionMetadata(ctx); + visit(ctx.extstart()); + checkWriteCast(expremd); + checkWriteBranch(ctx); + + return null; + } + + @Override + public Void visitExtstart(ExtstartContext ctx) { + final ExternalMetadata startemd = adapter.getExternalMetadata(ctx); + + if (startemd.token == ADD) { + final ExpressionMetadata storeemd = adapter.getExpressionMetadata(startemd.storeExpr); + + if (startemd.current.sort == Sort.STRING || storeemd.from.sort == Sort.STRING) { + writeNewStrings(); + strings.add(startemd.storeExpr); + } + } + + final ExtprecContext precctx = ctx.extprec(); + final ExtcastContext castctx = ctx.extcast(); + final ExttypeContext typectx = ctx.exttype(); + final ExtvarContext varctx = ctx.extvar(); + final ExtnewContext newctx = ctx.extnew(); + final 
ExtstringContext stringctx = ctx.extstring(); + + if (precctx != null) { + visit(precctx); + } else if (castctx != null) { + visit(castctx); + } else if (typectx != null) { + visit(typectx); + } else if (varctx != null) { + visit(varctx); + } else if (newctx != null) { + visit(newctx); + } else if (stringctx != null) { + visit(stringctx); + } else { + throw new IllegalStateException(); + } + + return null; + } + + @Override + public Void visitExtprec(final ExtprecContext ctx) { + final ExtprecContext precctx = ctx.extprec(); + final ExtcastContext castctx = ctx.extcast(); + final ExttypeContext typectx = ctx.exttype(); + final ExtvarContext varctx = ctx.extvar(); + final ExtnewContext newctx = ctx.extnew(); + final ExtstringContext stringctx = ctx.extstring(); + + if (precctx != null) { + visit(precctx); + } else if (castctx != null) { + visit(castctx); + } else if (typectx != null) { + visit(typectx); + } else if (varctx != null) { + visit(varctx); + } else if (newctx != null) { + visit(newctx); + } else if (stringctx != null) { + visit(stringctx); + } else { + throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + } + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + if (dotctx != null) { + visit(dotctx); + } else if (bracectx != null) { + visit(bracectx); + } + + return null; + } + + @Override + public Void visitExtcast(final ExtcastContext ctx) { + ExtNodeMetadata castenmd = adapter.getExtNodeMetadata(ctx); + + final ExtprecContext precctx = ctx.extprec(); + final ExtcastContext castctx = ctx.extcast(); + final ExttypeContext typectx = ctx.exttype(); + final ExtvarContext varctx = ctx.extvar(); + final ExtnewContext newctx = ctx.extnew(); + final ExtstringContext stringctx = ctx.extstring(); + + if (precctx != null) { + visit(precctx); + } else if (castctx != null) { + visit(castctx); + } else if (typectx != null) { + visit(typectx); + } else if (varctx != null) { + visit(varctx); + } else 
if (newctx != null) { + visit(newctx); + } else if (stringctx != null) { + visit(stringctx); + } else { + throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + } + + checkWriteCast(ctx, castenmd.castTo); + + return null; + } + + @Override + public Void visitExtbrace(final ExtbraceContext ctx) { + final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); + + visit(exprctx); + writeLoadStoreExternal(ctx); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + if (dotctx != null) { + visit(dotctx); + } else if (bracectx != null) { + visit(bracectx); + } + + return null; + } + + @Override + public Void visitExtdot(final ExtdotContext ctx) { + final ExtcallContext callctx = ctx.extcall(); + final ExtfieldContext fieldctx = ctx.extfield(); + + if (callctx != null) { + visit(callctx); + } else if (fieldctx != null) { + visit(fieldctx); + } + + return null; + } + + @Override + public Void visitExttype(final ExttypeContext ctx) { + visit(ctx.extdot()); + + return null; + } + + @Override + public Void visitExtcall(final ExtcallContext ctx) { + writeCallExternal(ctx); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + if (dotctx != null) { + visit(dotctx); + } else if (bracectx != null) { + visit(bracectx); + } + + return null; + } + + @Override + public Void visitExtvar(final ExtvarContext ctx) { + writeLoadStoreExternal(ctx); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + if (dotctx != null) { + visit(dotctx); + } else if (bracectx != null) { + visit(bracectx); + } + + return null; + } + + @Override + public Void visitExtfield(final ExtfieldContext ctx) { + writeLoadStoreExternal(ctx); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + if (dotctx != null) { + visit(dotctx); + } else if (bracectx != null) { + visit(bracectx); + } 
+ + return null; + } + + @Override + public Void visitExtnew(ExtnewContext ctx) { + writeNewExternal(ctx); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + if (dotctx != null) { + visit(dotctx); + } else if (bracectx != null) { + visit(bracectx); + } + + return null; + } + + @Override + public Void visitExtstring(ExtstringContext ctx) { + final ExtNodeMetadata stringenmd = adapter.getExtNodeMetadata(ctx); + + writeConstant(ctx, stringenmd.target); + + final ExtdotContext dotctx = ctx.extdot(); + final ExtbraceContext bracectx = ctx.extbrace(); + + if (dotctx != null) { + visit(dotctx); + } else if (bracectx != null) { + visit(bracectx); + } + + return null; + } + + @Override + public Void visitArguments(final ArgumentsContext ctx) { + throw new UnsupportedOperationException(error(ctx) + "Unexpected writer state."); + } + + @Override + public Void visitIncrement(IncrementContext ctx) { + final ExpressionMetadata incremd = adapter.getExpressionMetadata(ctx); + final Object postConst = incremd.postConst; + + if (postConst == null) { + writeNumeric(ctx, incremd.preConst); + checkWriteCast(incremd); + } else { + writeConstant(ctx, postConst); + } + + checkWriteBranch(ctx); + + return null; + } + + private void writeConstant(final ParserRuleContext source, final Object constant) { + if (constant instanceof Number) { + writeNumeric(source, constant); + } else if (constant instanceof Character) { + writeNumeric(source, (int)(char)constant); + } else if (constant instanceof String) { + writeString(source, constant); + } else if (constant instanceof Boolean) { + writeBoolean(source, constant); + } else if (constant != null) { + throw new IllegalStateException(error(source) + "Unexpected writer state."); + } + } + + private void writeNumeric(final ParserRuleContext source, final Object numeric) { + if (numeric instanceof Double) { + execute.push((double)numeric); + } else if (numeric instanceof Float) { + 
execute.push((float)numeric); + } else if (numeric instanceof Long) { + execute.push((long)numeric); + } else if (numeric instanceof Number) { + execute.push(((Number)numeric).intValue()); + } else { + throw new IllegalStateException(error(source) + "Unexpected writer state."); + } + } + + private void writeString(final ParserRuleContext source, final Object string) { + if (string instanceof String) { + execute.push((String)string); + } else { + throw new IllegalStateException(error(source) + "Unexpected writer state."); + } + } + + private void writeBoolean(final ParserRuleContext source, final Object bool) { + if (bool instanceof Boolean) { + execute.push((boolean)bool); + } else { + throw new IllegalStateException(error(source) + "Unexpected writer state."); + } + } + + private void writeNewStrings() { + execute.newInstance(STRINGBUILDER_TYPE); + execute.dup(); + execute.invokeConstructor(STRINGBUILDER_TYPE, STRINGBUILDER_CONSTRUCTOR); + } + + private void writeAppendStrings(final Sort sort) { + switch (sort) { + case BOOL: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_BOOLEAN); break; + case CHAR: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_CHAR); break; + case BYTE: + case SHORT: + case INT: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_INT); break; + case LONG: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_LONG); break; + case FLOAT: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_FLOAT); break; + case DOUBLE: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_DOUBLE); break; + case STRING: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_STRING); break; + default: execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_APPEND_OBJECT); + } + } + + private void writeToStrings() { + execute.invokeVirtual(STRINGBUILDER_TYPE, STRINGBUILDER_TOSTRING); + } + + private void writeBinaryInstruction(final ParserRuleContext source, final Type type, final int 
token) { + final Sort sort = type.sort; + final boolean exact = !settings.getNumericOverflow() && + ((sort == Sort.INT || sort == Sort.LONG) && + (token == MUL || token == DIV || token == ADD || token == SUB) || + (sort == Sort.FLOAT || sort == Sort.DOUBLE) && + (token == MUL || token == DIV || token == REM || token == ADD || token == SUB)); + + // if its a 64-bit shift, fixup the last argument to truncate to 32-bits + // note unlike java, this means we still do binary promotion of shifts, + // but it keeps things simple -- this check works because we promote shifts. + if (sort == Sort.LONG && (token == LSH || token == USH || token == RSH)) { + execute.cast(org.objectweb.asm.Type.LONG_TYPE, org.objectweb.asm.Type.INT_TYPE); + } + + if (exact) { + switch (sort) { + case INT: + switch (token) { + case MUL: execute.invokeStatic(definition.mathType.type, MULEXACT_INT); break; + case DIV: execute.invokeStatic(definition.utilityType.type, DIVWOOVERLOW_INT); break; + case ADD: execute.invokeStatic(definition.mathType.type, ADDEXACT_INT); break; + case SUB: execute.invokeStatic(definition.mathType.type, SUBEXACT_INT); break; + default: + throw new IllegalStateException(error(source) + "Unexpected writer state."); + } + + break; + case LONG: + switch (token) { + case MUL: execute.invokeStatic(definition.mathType.type, MULEXACT_LONG); break; + case DIV: execute.invokeStatic(definition.utilityType.type, DIVWOOVERLOW_LONG); break; + case ADD: execute.invokeStatic(definition.mathType.type, ADDEXACT_LONG); break; + case SUB: execute.invokeStatic(definition.mathType.type, SUBEXACT_LONG); break; + default: + throw new IllegalStateException(error(source) + "Unexpected writer state."); + } + + break; + case FLOAT: + switch (token) { + case MUL: execute.invokeStatic(definition.utilityType.type, MULWOOVERLOW_FLOAT); break; + case DIV: execute.invokeStatic(definition.utilityType.type, DIVWOOVERLOW_FLOAT); break; + case REM: execute.invokeStatic(definition.utilityType.type, 
REMWOOVERLOW_FLOAT); break; + case ADD: execute.invokeStatic(definition.utilityType.type, ADDWOOVERLOW_FLOAT); break; + case SUB: execute.invokeStatic(definition.utilityType.type, SUBWOOVERLOW_FLOAT); break; + default: + throw new IllegalStateException(error(source) + "Unexpected writer state."); + } + + break; + case DOUBLE: + switch (token) { + case MUL: execute.invokeStatic(definition.utilityType.type, MULWOOVERLOW_DOUBLE); break; + case DIV: execute.invokeStatic(definition.utilityType.type, DIVWOOVERLOW_DOUBLE); break; + case REM: execute.invokeStatic(definition.utilityType.type, REMWOOVERLOW_DOUBLE); break; + case ADD: execute.invokeStatic(definition.utilityType.type, ADDWOOVERLOW_DOUBLE); break; + case SUB: execute.invokeStatic(definition.utilityType.type, SUBWOOVERLOW_DOUBLE); break; + default: + throw new IllegalStateException(error(source) + "Unexpected writer state."); + } + + break; + default: + throw new IllegalStateException(error(source) + "Unexpected writer state."); + } + } else { + if ((sort == Sort.FLOAT || sort == Sort.DOUBLE) && + (token == LSH || token == USH || token == RSH || token == BWAND || token == BWXOR || token == BWOR)) { + throw new IllegalStateException(error(source) + "Unexpected writer state."); + } + + if (sort == Sort.DEF) { + switch (token) { + case MUL: execute.invokeStatic(definition.defobjType.type, DEF_MUL_CALL); break; + case DIV: execute.invokeStatic(definition.defobjType.type, DEF_DIV_CALL); break; + case REM: execute.invokeStatic(definition.defobjType.type, DEF_REM_CALL); break; + case ADD: execute.invokeStatic(definition.defobjType.type, DEF_ADD_CALL); break; + case SUB: execute.invokeStatic(definition.defobjType.type, DEF_SUB_CALL); break; + case LSH: execute.invokeStatic(definition.defobjType.type, DEF_LSH_CALL); break; + case USH: execute.invokeStatic(definition.defobjType.type, DEF_RSH_CALL); break; + case RSH: execute.invokeStatic(definition.defobjType.type, DEF_USH_CALL); break; + case BWAND: 
execute.invokeStatic(definition.defobjType.type, DEF_AND_CALL); break; + case BWXOR: execute.invokeStatic(definition.defobjType.type, DEF_XOR_CALL); break; + case BWOR: execute.invokeStatic(definition.defobjType.type, DEF_OR_CALL); break; + default: + throw new IllegalStateException(error(source) + "Unexpected writer state."); + } + } else { + switch (token) { + case MUL: execute.math(GeneratorAdapter.MUL, type.type); break; + case DIV: execute.math(GeneratorAdapter.DIV, type.type); break; + case REM: execute.math(GeneratorAdapter.REM, type.type); break; + case ADD: execute.math(GeneratorAdapter.ADD, type.type); break; + case SUB: execute.math(GeneratorAdapter.SUB, type.type); break; + case LSH: execute.math(GeneratorAdapter.SHL, type.type); break; + case USH: execute.math(GeneratorAdapter.USHR, type.type); break; + case RSH: execute.math(GeneratorAdapter.SHR, type.type); break; + case BWAND: execute.math(GeneratorAdapter.AND, type.type); break; + case BWXOR: execute.math(GeneratorAdapter.XOR, type.type); break; + case BWOR: execute.math(GeneratorAdapter.OR, type.type); break; + default: + throw new IllegalStateException(error(source) + "Unexpected writer state."); + } + } + } + } + + /** + * Called for any compound assignment (including increment/decrement instructions). + * We have to be stricter than writeBinary, and do overflow checks against the original type's size + * instead of the promoted type's size, since the result will be implicitly cast back. 
+ * + * @return true if an instruction is written, false otherwise + */ + private boolean writeExactInstruction(final Sort osort, final Sort psort) { + if (psort == Sort.DOUBLE) { + if (osort == Sort.FLOAT) { + execute.invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE); + } else if (osort == Sort.FLOAT_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE); + execute.checkCast(definition.floatobjType.type); + } else if (osort == Sort.LONG) { + execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE); + } else if (osort == Sort.LONG_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE); + execute.checkCast(definition.longobjType.type); + } else if (osort == Sort.INT) { + execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE); + } else if (osort == Sort.INT_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE); + execute.checkCast(definition.intobjType.type); + } else if (osort == Sort.CHAR) { + execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_DOUBLE); + } else if (osort == Sort.CHAR_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_DOUBLE); + execute.checkCast(definition.charobjType.type); + } else if (osort == Sort.SHORT) { + execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE); + } else if (osort == Sort.SHORT_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE); + execute.checkCast(definition.shortobjType.type); + } else if (osort == Sort.BYTE) { + execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE); + } else if (osort == Sort.BYTE_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE); + execute.checkCast(definition.byteobjType.type); + } else { + return false; + } + } else if (psort == Sort.FLOAT) { + if (osort == Sort.LONG) { + 
execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_FLOAT); + } else if (osort == Sort.LONG_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_FLOAT); + execute.checkCast(definition.longobjType.type); + } else if (osort == Sort.INT) { + execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT); + } else if (osort == Sort.INT_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT); + execute.checkCast(definition.intobjType.type); + } else if (osort == Sort.CHAR) { + execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT); + } else if (osort == Sort.CHAR_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT); + execute.checkCast(definition.charobjType.type); + } else if (osort == Sort.SHORT) { + execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_FLOAT); + } else if (osort == Sort.SHORT_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_FLOAT); + execute.checkCast(definition.shortobjType.type); + } else if (osort == Sort.BYTE) { + execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_FLOAT); + } else if (osort == Sort.BYTE_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_FLOAT); + execute.checkCast(definition.byteobjType.type); + } else { + return false; + } + } else if (psort == Sort.LONG) { + if (osort == Sort.INT) { + execute.invokeStatic(definition.mathType.type, TOINTEXACT_LONG); + } else if (osort == Sort.INT_OBJ) { + execute.invokeStatic(definition.mathType.type, TOINTEXACT_LONG); + execute.checkCast(definition.intobjType.type); + } else if (osort == Sort.CHAR) { + execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG); + } else if (osort == Sort.CHAR_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG); + execute.checkCast(definition.charobjType.type); + } else if (osort == Sort.SHORT) { + 
execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG); + } else if (osort == Sort.SHORT_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG); + execute.checkCast(definition.shortobjType.type); + } else if (osort == Sort.BYTE) { + execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG); + } else if (osort == Sort.BYTE_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG); + execute.checkCast(definition.byteobjType.type); + } else { + return false; + } + } else if (psort == Sort.INT) { + if (osort == Sort.CHAR) { + execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_INT); + } else if (osort == Sort.CHAR_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_INT); + execute.checkCast(definition.charobjType.type); + } else if (osort == Sort.SHORT) { + execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT); + } else if (osort == Sort.SHORT_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT); + execute.checkCast(definition.shortobjType.type); + } else if (osort == Sort.BYTE) { + execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT); + } else if (osort == Sort.BYTE_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT); + execute.checkCast(definition.byteobjType.type); + } else { + return false; + } + } else { + return false; + } + + return true; + } + + private void writeLoadStoreExternal(final ParserRuleContext source) { + final ExtNodeMetadata sourceenmd = adapter.getExtNodeMetadata(source); + final ExternalMetadata parentemd = adapter.getExternalMetadata(sourceenmd.parent); + + final boolean length = "#length".equals(sourceenmd.target); + final boolean array = "#brace".equals(sourceenmd.target); + final boolean name = sourceenmd.target instanceof String && !length && !array; + final boolean variable = sourceenmd.target instanceof Integer; + final boolean field = sourceenmd.target instanceof Field; 
+ final boolean shortcut = sourceenmd.target instanceof Object[]; + + if (!length && !variable && !field && !array && !name && !shortcut) { + throw new IllegalStateException(error(source) + "Target not found for load/store."); + } + + final boolean maplist = shortcut && (boolean)((Object[])sourceenmd.target)[2]; + final Object constant = shortcut ? ((Object[])sourceenmd.target)[3] : null; + + final boolean x1 = field || name || (shortcut && !maplist); + final boolean x2 = array || (shortcut && maplist); + + if (length) { + execute.arrayLength(); + } else if (sourceenmd.last && parentemd.storeExpr != null) { + final ExpressionMetadata expremd = adapter.getExpressionMetadata(parentemd.storeExpr); + final boolean cat = strings.contains(parentemd.storeExpr); + + if (cat) { + if (field || name || shortcut) { + execute.dupX1(); + } else if (array) { + execute.dup2X1(); + } + + if (maplist) { + if (constant != null) { + writeConstant(source, constant); + } + + execute.dupX2(); + } + + writeLoadStoreInstruction(source, false, variable, field, name, array, shortcut); + writeAppendStrings(sourceenmd.type.sort); + visit(parentemd.storeExpr); + + if (strings.contains(parentemd.storeExpr)) { + writeAppendStrings(expremd.to.sort); + strings.remove(parentemd.storeExpr); + } + + writeToStrings(); + checkWriteCast(source, sourceenmd.castTo); + + if (parentemd.read) { + writeDup(sourceenmd.type.sort.size, x1, x2); + } + + writeLoadStoreInstruction(source, true, variable, field, name, array, shortcut); + } else if (parentemd.token > 0) { + final int token = parentemd.token; + + if (field || name || shortcut) { + execute.dup(); + } else if (array) { + execute.dup2(); + } + + if (maplist) { + if (constant != null) { + writeConstant(source, constant); + } + + execute.dupX1(); + } + + writeLoadStoreInstruction(source, false, variable, field, name, array, shortcut); + + if (parentemd.read && parentemd.post) { + writeDup(sourceenmd.type.sort.size, x1, x2); + } + + checkWriteCast(source, 
sourceenmd.castFrom); + visit(parentemd.storeExpr); + + writeBinaryInstruction(source, sourceenmd.promote, token); + + boolean exact = false; + + if (!settings.getNumericOverflow() && expremd.typesafe && sourceenmd.type.sort != Sort.DEF && + (token == MUL || token == DIV || token == REM || token == ADD || token == SUB)) { + exact = writeExactInstruction(sourceenmd.type.sort, sourceenmd.promote.sort); + } + + if (!exact) { + checkWriteCast(source, sourceenmd.castTo); + } + + if (parentemd.read && !parentemd.post) { + writeDup(sourceenmd.type.sort.size, x1, x2); + } + + writeLoadStoreInstruction(source, true, variable, field, name, array, shortcut); + } else { + if (constant != null) { + writeConstant(source, constant); + } + + visit(parentemd.storeExpr); + + if (parentemd.read) { + writeDup(sourceenmd.type.sort.size, x1, x2); + } + + writeLoadStoreInstruction(source, true, variable, field, name, array, shortcut); + } + } else { + if (constant != null) { + writeConstant(source, constant); + } + + writeLoadStoreInstruction(source, false, variable, field, name, array, shortcut); + } + } + + private void writeLoadStoreInstruction(final ParserRuleContext source, + final boolean store, final boolean variable, + final boolean field, final boolean name, + final boolean array, final boolean shortcut) { + final ExtNodeMetadata sourceemd = adapter.getExtNodeMetadata(source); + + if (variable) { + writeLoadStoreVariable(source, store, sourceemd.type, (int)sourceemd.target); + } else if (field) { + writeLoadStoreField(store, (Field)sourceemd.target); + } else if (name) { + writeLoadStoreField(source, store, (String)sourceemd.target); + } else if (array) { + writeLoadStoreArray(source, store, sourceemd.type); + } else if (shortcut) { + Object[] targets = (Object[])sourceemd.target; + writeLoadStoreShortcut(store, (Method)targets[0], (Method)targets[1]); + } else { + throw new IllegalStateException(error(source) + "Load/Store requires a variable, field, or array."); + } + } + + 
private void writeLoadStoreVariable(final ParserRuleContext source, final boolean store, + final Type type, final int slot) { + if (type.sort == Sort.VOID) { + throw new IllegalStateException(error(source) + "Cannot load/store void type."); + } + + if (store) { + execute.visitVarInsn(type.type.getOpcode(Opcodes.ISTORE), slot); + } else { + execute.visitVarInsn(type.type.getOpcode(Opcodes.ILOAD), slot); + } + } + + private void writeLoadStoreField(final boolean store, final Field field) { + if (java.lang.reflect.Modifier.isStatic(field.reflect.getModifiers())) { + if (store) { + execute.putStatic(field.owner.type, field.reflect.getName(), field.type.type); + } else { + execute.getStatic(field.owner.type, field.reflect.getName(), field.type.type); + + if (!field.generic.clazz.equals(field.type.clazz)) { + execute.checkCast(field.generic.type); + } + } + } else { + if (store) { + execute.putField(field.owner.type, field.reflect.getName(), field.type.type); + } else { + execute.getField(field.owner.type, field.reflect.getName(), field.type.type); + + if (!field.generic.clazz.equals(field.type.clazz)) { + execute.checkCast(field.generic.type); + } + } + } + } + + private void writeLoadStoreField(final ParserRuleContext source, final boolean store, final String name) { + if (store) { + final ExtNodeMetadata sourceemd = adapter.getExtNodeMetadata(source); + final ExternalMetadata parentemd = adapter.getExternalMetadata(sourceemd.parent); + final ExpressionMetadata expremd = adapter.getExpressionMetadata(parentemd.storeExpr); + + execute.push(name); + execute.loadThis(); + execute.getField(CLASS_TYPE, "definition", DEFINITION_TYPE); + execute.push(parentemd.token == 0 && expremd.typesafe); + execute.invokeStatic(definition.defobjType.type, DEF_FIELD_STORE); + } else { + execute.push(name); + execute.loadThis(); + execute.getField(CLASS_TYPE, "definition", DEFINITION_TYPE); + execute.invokeStatic(definition.defobjType.type, DEF_FIELD_LOAD); + } + } + + private void 
writeLoadStoreArray(final ParserRuleContext source, final boolean store, final Type type) { + if (type.sort == Sort.VOID) { + throw new IllegalStateException(error(source) + "Cannot load/store void type."); + } + + if (type.sort == Sort.DEF) { + final ExtbraceContext bracectx = (ExtbraceContext)source; + final ExpressionMetadata expremd0 = adapter.getExpressionMetadata(bracectx.expression()); + + if (store) { + final ExtNodeMetadata braceenmd = adapter.getExtNodeMetadata(bracectx); + final ExternalMetadata parentemd = adapter.getExternalMetadata(braceenmd.parent); + final ExpressionMetadata expremd1 = adapter.getExpressionMetadata(parentemd.storeExpr); + + execute.loadThis(); + execute.getField(CLASS_TYPE, "definition", DEFINITION_TYPE); + execute.push(expremd0.typesafe); + execute.push(parentemd.token == 0 && expremd1.typesafe); + execute.invokeStatic(definition.defobjType.type, DEF_ARRAY_STORE); + } else { + execute.loadThis(); + execute.getField(CLASS_TYPE, "definition", DEFINITION_TYPE); + execute.push(expremd0.typesafe); + execute.invokeStatic(definition.defobjType.type, DEF_ARRAY_LOAD); + } + } else { + if (store) { + execute.arrayStore(type.type); + } else { + execute.arrayLoad(type.type); + } + } + } + + private void writeLoadStoreShortcut(final boolean store, final Method getter, final Method setter) { + final Method method = store ? 
setter : getter; + + if (java.lang.reflect.Modifier.isInterface(getter.owner.clazz.getModifiers())) { + execute.invokeInterface(method.owner.type, method.method); + } else { + execute.invokeVirtual(method.owner.type, method.method); + } + + if (store) { + writePop(method.rtn.type.getSize()); + } else if (!method.rtn.clazz.equals(method.handle.type().returnType())) { + execute.checkCast(method.rtn.type); + } + } + + private void writeDup(final int size, final boolean x1, final boolean x2) { + if (size == 1) { + if (x2) { + execute.dupX2(); + } else if (x1) { + execute.dupX1(); + } else { + execute.dup(); + } + } else if (size == 2) { + if (x2) { + execute.dup2X2(); + } else if (x1) { + execute.dup2X1(); + } else { + execute.dup2(); + } + } + } + + private void writeNewExternal(final ExtnewContext source) { + final ExtNodeMetadata sourceenmd = adapter.getExtNodeMetadata(source); + final ExternalMetadata parentemd = adapter.getExternalMetadata(sourceenmd.parent); + + final boolean makearray = "#makearray".equals(sourceenmd.target); + final boolean constructor = sourceenmd.target instanceof Constructor; + + if (!makearray && !constructor) { + throw new IllegalStateException(error(source) + "Target not found for new call."); + } + + if (makearray) { + for (final ExpressionContext exprctx : source.expression()) { + visit(exprctx); + } + + if (sourceenmd.type.sort == Sort.ARRAY) { + execute.visitMultiANewArrayInsn(sourceenmd.type.type.getDescriptor(), sourceenmd.type.type.getDimensions()); + } else { + execute.newArray(sourceenmd.type.type); + } + } else { + execute.newInstance(sourceenmd.type.type); + + if (parentemd.read) { + execute.dup(); + } + + for (final ExpressionContext exprctx : source.arguments().expression()) { + visit(exprctx); + } + + final Constructor target = (Constructor)sourceenmd.target; + execute.invokeConstructor(target.owner.type, target.method); + } + } + + private void writeCallExternal(final ExtcallContext source) { + final ExtNodeMetadata 
sourceenmd = adapter.getExtNodeMetadata(source); + + final boolean method = sourceenmd.target instanceof Method; + final boolean def = sourceenmd.target instanceof String; + + if (!method && !def) { + throw new IllegalStateException(error(source) + "Target not found for call."); + } + + final List arguments = source.arguments().expression(); + + if (method) { + for (final ExpressionContext exprctx : arguments) { + visit(exprctx); + } + + final Method target = (Method)sourceenmd.target; + + if (java.lang.reflect.Modifier.isStatic(target.reflect.getModifiers())) { + execute.invokeStatic(target.owner.type, target.method); + } else if (java.lang.reflect.Modifier.isInterface(target.owner.clazz.getModifiers())) { + execute.invokeInterface(target.owner.type, target.method); + } else { + execute.invokeVirtual(target.owner.type, target.method); + } + + if (!target.rtn.clazz.equals(target.handle.type().returnType())) { + execute.checkCast(target.rtn.type); + } + } else { + execute.push((String)sourceenmd.target); + execute.loadThis(); + execute.getField(CLASS_TYPE, "definition", DEFINITION_TYPE); + + execute.push(arguments.size()); + execute.newArray(definition.defType.type); + + for (int argument = 0; argument < arguments.size(); ++argument) { + execute.dup(); + execute.push(argument); + visit(arguments.get(argument)); + execute.arrayStore(definition.defType.type); + } + + execute.push(arguments.size()); + execute.newArray(definition.booleanType.type); + + for (int argument = 0; argument < arguments.size(); ++argument) { + execute.dup(); + execute.push(argument); + execute.push(adapter.getExpressionMetadata(arguments.get(argument)).typesafe); + execute.arrayStore(definition.booleanType.type); + } + + execute.invokeStatic(definition.defobjType.type, DEF_METHOD_CALL); + } + } + + private void writePop(final int size) { + if (size == 1) { + execute.pop(); + } else if (size == 2) { + execute.pop2(); + } + } + + private void checkWriteCast(final ExpressionMetadata sort) { + 
checkWriteCast(sort.source, sort.cast); + } + + private void checkWriteCast(final ParserRuleContext source, final Cast cast) { + if (cast instanceof Transform) { + writeTransform((Transform)cast); + } else if (cast != null) { + writeCast(cast); + } else { + throw new IllegalStateException(error(source) + "Unexpected cast object."); + } + } + + private void writeCast(final Cast cast) { + final Type from = cast.from; + final Type to = cast.to; + + if (from.equals(to)) { + return; + } + + if (from.sort.numeric && from.sort.primitive && to.sort.numeric && to.sort.primitive) { + execute.cast(from.type, to.type); + } else { + try { + from.clazz.asSubclass(to.clazz); + } catch (ClassCastException exception) { + execute.checkCast(to.type); + } + } + } + + private void writeTransform(final Transform transform) { + if (transform.upcast != null) { + execute.checkCast(transform.upcast.type); + } + + if (java.lang.reflect.Modifier.isStatic(transform.method.reflect.getModifiers())) { + execute.invokeStatic(transform.method.owner.type, transform.method.method); + } else if (java.lang.reflect.Modifier.isInterface(transform.method.owner.clazz.getModifiers())) { + execute.invokeInterface(transform.method.owner.type, transform.method.method); + } else { + execute.invokeVirtual(transform.method.owner.type, transform.method.method); + } + + if (transform.downcast != null) { + execute.checkCast(transform.downcast.type); + } + } + + void checkWriteBranch(final ParserRuleContext source) { + final Branch branch = getBranch(source); + + if (branch != null) { + if (branch.tru != null) { + execute.visitJumpInsn(Opcodes.IFNE, branch.tru); + } else if (branch.fals != null) { + execute.visitJumpInsn(Opcodes.IFEQ, branch.fals); + } + } + } + + private void writeEnd() { + writer.visitEnd(); + } + + private byte[] getBytes() { + return writer.toByteArray(); + } +} diff --git a/plugins/lang-plan-a/src/main/plugin-metadata/plugin-security.policy 
b/plugins/lang-plan-a/src/main/plugin-metadata/plugin-security.policy new file mode 100644 index 00000000000..e45c1b86ceb --- /dev/null +++ b/plugins/lang-plan-a/src/main/plugin-metadata/plugin-security.policy @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +grant { + // needed to generate runtime classes + permission java.lang.RuntimePermission "createClassLoader"; +}; diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AdditionTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AdditionTests.java new file mode 100644 index 00000000000..af7eb25a6c0 --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AdditionTests.java @@ -0,0 +1,199 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plan.a; + +import java.lang.invoke.MethodHandles; +import java.lang.invoke.MethodType; +import java.util.HashMap; +import java.util.Map; + +/** Tests for addition operator across all types */ +//TODO: NaN/Inf/overflow/... +public class AdditionTests extends ScriptTestCase { + + public void testInt() throws Exception { + assertEquals(1+1, exec("int x = 1; int y = 1; return x+y;")); + assertEquals(1+2, exec("int x = 1; int y = 2; return x+y;")); + assertEquals(5+10, exec("int x = 5; int y = 10; return x+y;")); + assertEquals(1+1+2, exec("int x = 1; int y = 1; int z = 2; return x+y+z;")); + assertEquals((1+1)+2, exec("int x = 1; int y = 1; int z = 2; return (x+y)+z;")); + assertEquals(1+(1+2), exec("int x = 1; int y = 1; int z = 2; return x+(y+z);")); + assertEquals(0+1, exec("int x = 0; int y = 1; return x+y;")); + assertEquals(1+0, exec("int x = 1; int y = 0; return x+y;")); + assertEquals(0+0, exec("int x = 0; int y = 0; return x+y;")); + assertEquals(0+0, exec("int x = 0; int y = 0; return x+y;")); + } + + public void testIntConst() throws Exception { + assertEquals(1+1, exec("return 1+1;")); + assertEquals(1+2, exec("return 1+2;")); + assertEquals(5+10, exec("return 5+10;")); + assertEquals(1+1+2, exec("return 1+1+2;")); + assertEquals((1+1)+2, exec("return (1+1)+2;")); + assertEquals(1+(1+2), exec("return 1+(1+2);")); + assertEquals(0+1, exec("return 0+1;")); + assertEquals(1+0, exec("return 1+0;")); + assertEquals(0+0, exec("return 0+0;")); + } + + public void testByte() throws Exception { + 
assertEquals((byte)1+(byte)1, exec("byte x = 1; byte y = 1; return x+y;")); + assertEquals((byte)1+(byte)2, exec("byte x = 1; byte y = 2; return x+y;")); + assertEquals((byte)5+(byte)10, exec("byte x = 5; byte y = 10; return x+y;")); + assertEquals((byte)1+(byte)1+(byte)2, exec("byte x = 1; byte y = 1; byte z = 2; return x+y+z;")); + assertEquals(((byte)1+(byte)1)+(byte)2, exec("byte x = 1; byte y = 1; byte z = 2; return (x+y)+z;")); + assertEquals((byte)1+((byte)1+(byte)2), exec("byte x = 1; byte y = 1; byte z = 2; return x+(y+z);")); + assertEquals((byte)0+(byte)1, exec("byte x = 0; byte y = 1; return x+y;")); + assertEquals((byte)1+(byte)0, exec("byte x = 1; byte y = 0; return x+y;")); + assertEquals((byte)0+(byte)0, exec("byte x = 0; byte y = 0; return x+y;")); + } + + public void testByteConst() throws Exception { + assertEquals((byte)1+(byte)1, exec("return (byte)1+(byte)1;")); + assertEquals((byte)1+(byte)2, exec("return (byte)1+(byte)2;")); + assertEquals((byte)5+(byte)10, exec("return (byte)5+(byte)10;")); + assertEquals((byte)1+(byte)1+(byte)2, exec("return (byte)1+(byte)1+(byte)2;")); + assertEquals(((byte)1+(byte)1)+(byte)2, exec("return ((byte)1+(byte)1)+(byte)2;")); + assertEquals((byte)1+((byte)1+(byte)2), exec("return (byte)1+((byte)1+(byte)2);")); + assertEquals((byte)0+(byte)1, exec("return (byte)0+(byte)1;")); + assertEquals((byte)1+(byte)0, exec("return (byte)1+(byte)0;")); + assertEquals((byte)0+(byte)0, exec("return (byte)0+(byte)0;")); + } + + public void testChar() throws Exception { + assertEquals((char)1+(char)1, exec("char x = 1; char y = 1; return x+y;")); + assertEquals((char)1+(char)2, exec("char x = 1; char y = 2; return x+y;")); + assertEquals((char)5+(char)10, exec("char x = 5; char y = 10; return x+y;")); + assertEquals((char)1+(char)1+(char)2, exec("char x = 1; char y = 1; char z = 2; return x+y+z;")); + assertEquals(((char)1+(char)1)+(char)2, exec("char x = 1; char y = 1; char z = 2; return (x+y)+z;")); + 
assertEquals((char)1+((char)1+(char)2), exec("char x = 1; char y = 1; char z = 2; return x+(y+z);")); + assertEquals((char)0+(char)1, exec("char x = 0; char y = 1; return x+y;")); + assertEquals((char)1+(char)0, exec("char x = 1; char y = 0; return x+y;")); + assertEquals((char)0+(char)0, exec("char x = 0; char y = 0; return x+y;")); + } + + public void testCharConst() throws Exception { + assertEquals((char)1+(char)1, exec("return (char)1+(char)1;")); + assertEquals((char)1+(char)2, exec("return (char)1+(char)2;")); + assertEquals((char)5+(char)10, exec("return (char)5+(char)10;")); + assertEquals((char)1+(char)1+(char)2, exec("return (char)1+(char)1+(char)2;")); + assertEquals(((char)1+(char)1)+(char)2, exec("return ((char)1+(char)1)+(char)2;")); + assertEquals((char)1+((char)1+(char)2), exec("return (char)1+((char)1+(char)2);")); + assertEquals((char)0+(char)1, exec("return (char)0+(char)1;")); + assertEquals((char)1+(char)0, exec("return (char)1+(char)0;")); + assertEquals((char)0+(char)0, exec("return (char)0+(char)0;")); + } + + public void testShort() throws Exception { + assertEquals((short)1+(short)1, exec("short x = 1; short y = 1; return x+y;")); + assertEquals((short)1+(short)2, exec("short x = 1; short y = 2; return x+y;")); + assertEquals((short)5+(short)10, exec("short x = 5; short y = 10; return x+y;")); + assertEquals((short)1+(short)1+(short)2, exec("short x = 1; short y = 1; short z = 2; return x+y+z;")); + assertEquals(((short)1+(short)1)+(short)2, exec("short x = 1; short y = 1; short z = 2; return (x+y)+z;")); + assertEquals((short)1+((short)1+(short)2), exec("short x = 1; short y = 1; short z = 2; return x+(y+z);")); + assertEquals((short)0+(short)1, exec("short x = 0; short y = 1; return x+y;")); + assertEquals((short)1+(short)0, exec("short x = 1; short y = 0; return x+y;")); + assertEquals((short)0+(short)0, exec("short x = 0; short y = 0; return x+y;")); + } + + public void testShortConst() throws Exception { + 
assertEquals((short)1+(short)1, exec("return (short)1+(short)1;")); + assertEquals((short)1+(short)2, exec("return (short)1+(short)2;")); + assertEquals((short)5+(short)10, exec("return (short)5+(short)10;")); + assertEquals((short)1+(short)1+(short)2, exec("return (short)1+(short)1+(short)2;")); + assertEquals(((short)1+(short)1)+(short)2, exec("return ((short)1+(short)1)+(short)2;")); + assertEquals((short)1+((short)1+(short)2), exec("return (short)1+((short)1+(short)2);")); + assertEquals((short)0+(short)1, exec("return (short)0+(short)1;")); + assertEquals((short)1+(short)0, exec("return (short)1+(short)0;")); + assertEquals((short)0+(short)0, exec("return (short)0+(short)0;")); + } + + public void testLong() throws Exception { + assertEquals(1L+1L, exec("long x = 1; long y = 1; return x+y;")); + assertEquals(1L+2L, exec("long x = 1; long y = 2; return x+y;")); + assertEquals(5L+10L, exec("long x = 5; long y = 10; return x+y;")); + assertEquals(1L+1L+2L, exec("long x = 1; long y = 1; long z = 2; return x+y+z;")); + assertEquals((1L+1L)+2L, exec("long x = 1; long y = 1; long z = 2; return (x+y)+z;")); + assertEquals(1L+(1L+2L), exec("long x = 1; long y = 1; long z = 2; return x+(y+z);")); + assertEquals(0L+1L, exec("long x = 0; long y = 1; return x+y;")); + assertEquals(1L+0L, exec("long x = 1; long y = 0; return x+y;")); + assertEquals(0L+0L, exec("long x = 0; long y = 0; return x+y;")); + } + + public void testLongConst() throws Exception { + assertEquals(1L+1L, exec("return 1L+1L;")); + assertEquals(1L+2L, exec("return 1L+2L;")); + assertEquals(5L+10L, exec("return 5L+10L;")); + assertEquals(1L+1L+2L, exec("return 1L+1L+2L;")); + assertEquals((1L+1L)+2L, exec("return (1L+1L)+2L;")); + assertEquals(1L+(1L+2L), exec("return 1L+(1L+2L);")); + assertEquals(0L+1L, exec("return 0L+1L;")); + assertEquals(1L+0L, exec("return 1L+0L;")); + assertEquals(0L+0L, exec("return 0L+0L;")); + } + + public void testFloat() throws Exception { + assertEquals(1F+1F, exec("float x 
= 1F; float y = 1F; return x+y;")); + assertEquals(1F+2F, exec("float x = 1F; float y = 2F; return x+y;")); + assertEquals(5F+10F, exec("float x = 5F; float y = 10F; return x+y;")); + assertEquals(1F+1F+2F, exec("float x = 1F; float y = 1F; float z = 2F; return x+y+z;")); + assertEquals((1F+1F)+2F, exec("float x = 1F; float y = 1F; float z = 2F; return (x+y)+z;")); + assertEquals((1F+1F)+2F, exec("float x = 1F; float y = 1F; float z = 2F; return x+(y+z);")); + assertEquals(0F+1F, exec("float x = 0F; float y = 1F; return x+y;")); + assertEquals(1F+0F, exec("float x = 1F; float y = 0F; return x+y;")); + assertEquals(0F+0F, exec("float x = 0F; float y = 0F; return x+y;")); + } + + public void testFloatConst() throws Exception { + assertEquals(1F+1F, exec("return 1F+1F;")); + assertEquals(1F+2F, exec("return 1F+2F;")); + assertEquals(5F+10F, exec("return 5F+10F;")); + assertEquals(1F+1F+2F, exec("return 1F+1F+2F;")); + assertEquals((1F+1F)+2F, exec("return (1F+1F)+2F;")); + assertEquals(1F+(1F+2F), exec("return 1F+(1F+2F);")); + assertEquals(0F+1F, exec("return 0F+1F;")); + assertEquals(1F+0F, exec("return 1F+0F;")); + assertEquals(0F+0F, exec("return 0F+0F;")); + } + + public void testDouble() throws Exception { + assertEquals(1.0+1.0, exec("double x = 1.0; double y = 1.0; return x+y;")); + assertEquals(1.0+2.0, exec("double x = 1.0; double y = 2.0; return x+y;")); + assertEquals(5.0+10.0, exec("double x = 5.0; double y = 10.0; return x+y;")); + assertEquals(1.0+1.0+2.0, exec("double x = 1.0; double y = 1.0; double z = 2.0; return x+y+z;")); + assertEquals((1.0+1.0)+2.0, exec("double x = 1.0; double y = 1.0; double z = 2.0; return (x+y)+z;")); + assertEquals(1.0+(1.0+2.0), exec("double x = 1.0; double y = 1.0; double z = 2.0; return x+(y+z);")); + assertEquals(0.0+1.0, exec("double x = 0.0; double y = 1.0; return x+y;")); + assertEquals(1.0+0.0, exec("double x = 1.0; double y = 0.0; return x+y;")); + assertEquals(0.0+0.0, exec("double x = 0.0; double y = 0.0; return 
x+y;")); + } + + public void testDoubleConst() throws Exception { + assertEquals(1.0+1.0, exec("return 1.0+1.0;")); + assertEquals(1.0+2.0, exec("return 1.0+2.0;")); + assertEquals(5.0+10.0, exec("return 5.0+10.0;")); + assertEquals(1.0+1.0+2.0, exec("return 1.0+1.0+2.0;")); + assertEquals((1.0+1.0)+2.0, exec("return (1.0+1.0)+2.0;")); + assertEquals(1.0+(1.0+2.0), exec("return 1.0+(1.0+2.0);")); + assertEquals(0.0+1.0, exec("return 0.0+1.0;")); + assertEquals(1.0+0.0, exec("return 1.0+0.0;")); + assertEquals(0.0+0.0, exec("return 0.0+0.0;")); + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AndTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AndTests.java new file mode 100644 index 00000000000..6a4168415dc --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/AndTests.java @@ -0,0 +1,48 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plan.a; + +/** Tests for and operator across all types */ +public class AndTests extends ScriptTestCase { + + public void testInt() throws Exception { + assertEquals(5 & 12, exec("int x = 5; int y = 12; return x & y;")); + assertEquals(5 & -12, exec("int x = 5; int y = -12; return x & y;")); + assertEquals(7 & 15 & 3, exec("int x = 7; int y = 15; int z = 3; return x & y & z;")); + } + + public void testIntConst() throws Exception { + assertEquals(5 & 12, exec("return 5 & 12;")); + assertEquals(5 & -12, exec("return 5 & -12;")); + assertEquals(7 & 15 & 3, exec("return 7 & 15 & 3;")); + } + + public void testLong() throws Exception { + assertEquals(5L & 12L, exec("long x = 5; long y = 12; return x & y;")); + assertEquals(5L & -12L, exec("long x = 5; long y = -12; return x & y;")); + assertEquals(7L & 15L & 3L, exec("long x = 7; long y = 15; long z = 3; return x & y & z;")); + } + + public void testLongConst() throws Exception { + assertEquals(5L & 12L, exec("return 5L & 12L;")); + assertEquals(5L & -12L, exec("return 5L & -12L;")); + assertEquals(7L & 15L & 3L, exec("return 7L & 15L & 3L;")); + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicExpressionTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicExpressionTests.java new file mode 100644 index 00000000000..6af8adab564 --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicExpressionTests.java @@ -0,0 +1,126 @@ +package org.elasticsearch.plan.a; + +import java.util.Collections; + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +public class BasicExpressionTests extends ScriptTestCase { + + /** simple tests returning a constant value */ + public void testReturnConstant() { + assertEquals(5, exec("return 5;")); + assertEquals(7L, exec("return 7L;")); + assertEquals(7.0, exec("return 7.0;")); + assertEquals(32.0F, exec("return 32.0F;")); + assertEquals((byte)255, exec("return (byte)255;")); + assertEquals((short)5, exec("return (short)5;")); + assertEquals("string", exec("return \"string\";")); + assertEquals(true, exec("return true;")); + assertEquals(false, exec("return false;")); + assertNull(exec("return null;")); + } + + public void testReturnConstantChar() { + assertEquals('x', exec("return 'x';")); + } + + public void testConstantCharTruncation() { + assertEquals('蚠', exec("return (char)100000;")); + } + + /** declaring variables for primitive types */ + public void testDeclareVariable() { + assertEquals(5, exec("int i = 5; return i;")); + assertEquals(7L, exec("long l = 7; return l;")); + assertEquals(7.0, exec("double d = 7; return d;")); + assertEquals(32.0F, exec("float f = 32F; return f;")); + assertEquals((byte)255, exec("byte b = (byte)255; return b;")); + assertEquals((short)5, exec("short s = (short)5; return s;")); + assertEquals("string", exec("String s = \"string\"; return s;")); + assertEquals(true, exec("boolean v = true; return v;")); + assertEquals(false, exec("boolean v = false; return v;")); + } + + public void testCast() { + assertEquals(1, exec("return (int)1.0;")); + assertEquals((byte)100, exec("double x = 100; return (byte)x;")); + 
+ assertEquals(3, exec( + "Map x = new HashMap();\n" + + "Object y = x;\n" + + "((Map)y).put(2, 3);\n" + + "return x.get(2);\n")); + } + + public void testCat() { + assertEquals("aaabbb", exec("return \"aaa\" + \"bbb\";")); + assertEquals("aaabbb", exec("String aaa = \"aaa\", bbb = \"bbb\"; return aaa + bbb;")); + + assertEquals("aaabbbbbbbbb", exec( + "String aaa = \"aaa\", bbb = \"bbb\"; int x;\n" + + "for (; x < 3; ++x) \n" + + " aaa += bbb;\n" + + "return aaa;")); + } + + public void testComp() { + assertEquals(true, exec("return 2 < 3;")); + assertEquals(false, exec("int x = 4; char y = 2; return x < y;")); + assertEquals(true, exec("return 3 <= 3;")); + assertEquals(true, exec("int x = 3; char y = 3; return x <= y;")); + assertEquals(false, exec("return 2 > 3;")); + assertEquals(true, exec("int x = 4; long y = 2; return x > y;")); + assertEquals(false, exec("return 3 >= 4;")); + assertEquals(true, exec("double x = 3; float y = 3; return x >= y;")); + assertEquals(false, exec("return 3 == 4;")); + assertEquals(true, exec("double x = 3; float y = 3; return x == y;")); + assertEquals(true, exec("return 3 != 4;")); + assertEquals(false, exec("double x = 3; float y = 3; return x != y;")); + } + + /** + * Test boxed objects in various places + */ + public void testBoxing() { + // return + assertEquals(4, exec("return input.get(\"x\");", Collections.singletonMap("x", 4))); + // assignment + assertEquals(4, exec("int y = (Integer)input.get(\"x\"); return y;", Collections.singletonMap("x", 4))); + // comparison + assertEquals(true, exec("return 5 > (Integer)input.get(\"x\");", Collections.singletonMap("x", 4))); + } + + public void testBool() { + assertEquals(true, exec("return true && true;")); + assertEquals(false, exec("boolean a = true, b = false; return a && b;")); + assertEquals(true, exec("return true || true;")); + assertEquals(true, exec("boolean a = true, b = false; return a || b;")); + } + + public void testConditional() { + assertEquals(1, exec("int x = 5; 
return x > 3 ? 1 : 0;")); + assertEquals(0, exec("String a = null; return a != null ? 1 : 0;")); + } + + public void testPrecedence() { + assertEquals(2, exec("int x = 5; return (x+x)/x;")); + assertEquals(true, exec("boolean t = true, f = false; return t && (f || t);")); + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicStatementTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicStatementTests.java new file mode 100644 index 00000000000..07ad32d74af --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicStatementTests.java @@ -0,0 +1,178 @@ +package org.elasticsearch.plan.a; + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import java.util.HashMap; +import java.util.Map; + +public class BasicStatementTests extends ScriptTestCase { + + public void testIfStatement() { + assertEquals(1, exec("int x = 5; if (x == 5) return 1; return 0;")); + assertEquals(0, exec("int x = 4; if (x == 5) return 1; else return 0;")); + assertEquals(2, exec("int x = 4; if (x == 5) return 1; else if (x == 4) return 2; else return 0;")); + assertEquals(1, exec("int x = 4; if (x == 5) return 1; else if (x == 4) return 1; else return 0;")); + + assertEquals(3, exec( + "int x = 5;\n" + + "if (x == 5) {\n" + + " int y = 2;\n" + + " \n" + + " if (y == 2) {\n" + + " x = 3;\n" + + " }\n" + + " \n" + + "}\n" + + "\n" + + "return x;\n")); + } + + public void testWhileStatement() { + + assertEquals("aaaaaa", exec("String c = \"a\"; int x; while (x < 5) { c += \"a\"; ++x; } return c;")); + + Object value = exec( + " byte[][] b = new byte[5][5]; \n" + + " byte x = 0, y; \n" + + " \n" + + " while (x < 5) { \n" + + " y = 0; \n" + + " \n" + + " while (y < 5) { \n" + + " b[x][y] = (byte)(x*y); \n" + + " ++y; \n" + + " } \n" + + " \n" + + " ++x; \n" + + " } \n" + + " \n" + + " return b; \n"); + + byte[][] b = (byte[][])value; + + for (byte x = 0; x < 5; ++x) { + for (byte y = 0; y < 5; ++y) { + assertEquals(x*y, b[x][y]); + } + } + } + + public void testDoWhileStatement() { + assertEquals("aaaaaa", exec("String c = \"a\"; int x; do { c += \"a\"; ++x; } while (x < 5); return c;")); + + Object value = exec( + " int[][] b = new int[5][5]; \n" + + " int x = 0, y; \n" + + " \n" + + " do { \n" + + " y = 0; \n" + + " \n" + + " do { \n" + + " b[x][y] = x*y; \n" + + " ++y; \n" + + " } while (y < 5); \n" + + " \n" + + " ++x; \n" + + " } while (x < 5); \n" + + " \n" + + " return b; \n"); + + int[][] b = (int[][])value; + + for (byte x = 0; x < 5; ++x) { + for (byte y = 0; y < 5; ++y) { + assertEquals(x*y, b[x][y]); + } + } + } + + public void testForStatement() { + assertEquals("aaaaaa", exec("String c = \"a\"; for (int x = 0; x 
< 5; ++x) c += \"a\"; return c;")); + + Object value = exec( + " int[][] b = new int[5][5]; \n" + + " for (int x = 0; x < 5; ++x) { \n" + + " for (int y = 0; y < 5; ++y) { \n" + + " b[x][y] = x*y; \n" + + " } \n" + + " } \n" + + " \n" + + " return b; \n"); + + int[][] b = (int[][])value; + + for (byte x = 0; x < 5; ++x) { + for (byte y = 0; y < 5; ++y) { + assertEquals(x*y, b[x][y]); + } + } + } + + public void testDeclarationStatement() { + assertEquals((byte)2, exec("byte a = 2; return a;")); + assertEquals((short)2, exec("short a = 2; return a;")); + assertEquals((char)2, exec("char a = 2; return a;")); + assertEquals(2, exec("int a = 2; return a;")); + assertEquals(2L, exec("long a = 2; return a;")); + assertEquals(2F, exec("float a = 2; return a;")); + assertEquals(2.0, exec("double a = 2; return a;")); + assertEquals(false, exec("boolean a = false; return a;")); + assertEquals("string", exec("String a = \"string\"; return a;")); + assertEquals(HashMap.class, exec("Map a = new HashMap(); return a;").getClass()); + + assertEquals(byte[].class, exec("byte[] a = new byte[1]; return a;").getClass()); + assertEquals(short[].class, exec("short[] a = new short[1]; return a;").getClass()); + assertEquals(char[].class, exec("char[] a = new char[1]; return a;").getClass()); + assertEquals(int[].class, exec("int[] a = new int[1]; return a;").getClass()); + assertEquals(long[].class, exec("long[] a = new long[1]; return a;").getClass()); + assertEquals(float[].class, exec("float[] a = new float[1]; return a;").getClass()); + assertEquals(double[].class, exec("double[] a = new double[1]; return a;").getClass()); + assertEquals(boolean[].class, exec("boolean[] a = new boolean[1]; return a;").getClass()); + assertEquals(String[].class, exec("String[] a = new String[1]; return a;").getClass()); + assertEquals(Map[].class, exec("Map[] a = new Map[1]; return a;").getClass()); + + assertEquals(byte[][].class, exec("byte[][] a = new byte[1][2]; return a;").getClass()); + 
assertEquals(short[][][].class, exec("short[][][] a = new short[1][2][3]; return a;").getClass()); + assertEquals(char[][][][].class, exec("char[][][][] a = new char[1][2][3][4]; return a;").getClass()); + assertEquals(int[][][][][].class, exec("int[][][][][] a = new int[1][2][3][4][5]; return a;").getClass()); + assertEquals(long[][].class, exec("long[][] a = new long[1][2]; return a;").getClass()); + assertEquals(float[][][].class, exec("float[][][] a = new float[1][2][3]; return a;").getClass()); + assertEquals(double[][][][].class, exec("double[][][][] a = new double[1][2][3][4]; return a;").getClass()); + assertEquals(boolean[][][][][].class, exec("boolean[][][][][] a = new boolean[1][2][3][4][5]; return a;").getClass()); + assertEquals(String[][].class, exec("String[][] a = new String[1][2]; return a;").getClass()); + assertEquals(Map[][][].class, exec("Map[][][] a = new Map[1][2][3]; return a;").getClass()); + } + + public void testContinueStatement() { + assertEquals(9, exec("int x = 0, y = 0; while (x < 10) { ++x; if (x == 1) continue; ++y; } return y;")); + } + + public void testBreakStatement() { + assertEquals(4, exec("int x = 0, y = 0; while (x < 10) { ++x; if (x == 5) break; ++y; } return y;")); + } + + public void testReturnStatement() { + assertEquals(10, exec("return 10;")); + assertEquals(5, exec("int x = 5; return x;")); + assertEquals(4, exec("int[] x = new int[2]; x[1] = 4; return x[1];")); + assertEquals(5, ((short[])exec("short[] s = new short[3]; s[1] = 5; return s;"))[1]); + assertEquals(10, ((Map)exec("Map s = new HashMap< String , Object >(); s.put(\"x\", 10); return s;")).get("x")); + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BinaryOperatorTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BinaryOperatorTests.java new file mode 100644 index 00000000000..032cdcde5e0 --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BinaryOperatorTests.java @@ -0,0 +1,294 
@@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plan.a; + +/** + * Tests binary operators across different types + */ +// TODO: NaN/Inf/overflow/... +public class BinaryOperatorTests extends ScriptTestCase { + + // TODO: move to per-type tests and test for each type + public void testBasics() { + assertEquals(2.25F / 1.5F, exec("return 2.25F / 1.5F;")); + assertEquals(2.25F % 1.5F, exec("return 2.25F % 1.5F;")); + assertEquals(2 - 1, exec("return 2 - 1;")); + assertEquals(1 << 2, exec("return 1 << 2;")); + assertEquals(4 >> 2, exec("return 4 >> 2;")); + assertEquals(-1 >>> 29, exec("return -1 >>> 29;")); + assertEquals(5 & 3, exec("return 5 & 3;")); + assertEquals(5 & 3L, exec("return 5 & 3L;")); + assertEquals(5L & 3, exec("return 5L & 3;")); + assertEquals(5 | 3, exec("return 5 | 3;")); + assertEquals(5L | 3, exec("return 5L | 3;")); + assertEquals(5 | 3L, exec("return 5 | 3L;")); + assertEquals(9 ^ 3, exec("return 9 ^ 3;")); + assertEquals(9L ^ 3, exec("return 9L ^ 3;")); + assertEquals(9 ^ 3L, exec("return 9 ^ 3L;")); + } + + public void testLongShifts() { + // note: we always promote the results of shifts too (unlike java) + assertEquals(1L << 2, exec("long x = 1L; int y = 2; return x << 
y;")); + assertEquals(1L << 2L, exec("long x = 1L; long y = 2L; return x << y;")); + assertEquals(4L >> 2L, exec("long x = 4L; long y = 2L; return x >> y;")); + assertEquals(4L >> 2, exec("long x = 4L; int y = 2; return x >> y;")); + assertEquals(-1L >>> 29, exec("long x = -1L; int y = 29; return x >>> y;")); + assertEquals(-1L >>> 29L, exec("long x = -1L; long y = 29L; return x >>> y;")); + } + + public void testLongShiftsConst() { + // note: we always promote the results of shifts too (unlike java) + assertEquals(1L << 2, exec("return 1L << 2;")); + assertEquals(1L << 2L, exec("return 1 << 2L;")); + assertEquals(4L >> 2L, exec("return 4 >> 2L;")); + assertEquals(4L >> 2, exec("return 4L >> 2;")); + assertEquals(-1L >>> 29, exec("return -1L >>> 29;")); + assertEquals(-1L >>> 29L, exec("return -1 >>> 29L;")); + } + + public void testMixedTypes() { + assertEquals(8, exec("int x = 4; char y = 2; return x*y;")); + assertEquals(0.5, exec("double x = 1; float y = 2; return x / y;")); + assertEquals(1, exec("int x = 3; int y = 2; return x % y;")); + assertEquals(3.0, exec("double x = 1; byte y = 2; return x + y;")); + assertEquals(-1, exec("int x = 1; char y = 2; return x - y;")); + assertEquals(4, exec("int x = 1; char y = 2; return x << y;")); + assertEquals(-1, exec("int x = -1; char y = 29; return x >> y;")); + assertEquals(3, exec("int x = -1; char y = 30; return x >>> y;")); + assertEquals(1L, exec("int x = 5; long y = 3; return x & y;")); + assertEquals(7, exec("short x = 5; byte y = 3; return x | y;")); + assertEquals(10, exec("short x = 9; char y = 3; return x ^ y;")); + } + + public void testBinaryPromotion() throws Exception { + // byte/byte + assertEquals((byte)1 + (byte)1, exec("byte x = 1; byte y = 1; return x+y;")); + // byte/char + assertEquals((byte)1 + (char)1, exec("byte x = 1; char y = 1; return x+y;")); + // byte/short + assertEquals((byte)1 + (short)1, exec("byte x = 1; short y = 1; return x+y;")); + // byte/int + assertEquals((byte)1 + 1, 
exec("byte x = 1; int y = 1; return x+y;")); + // byte/long + assertEquals((byte)1 + 1L, exec("byte x = 1; long y = 1; return x+y;")); + // byte/float + assertEquals((byte)1 + 1F, exec("byte x = 1; float y = 1; return x+y;")); + // byte/double + assertEquals((byte)1 + 1.0, exec("byte x = 1; double y = 1; return x+y;")); + + // char/byte + assertEquals((char)1 + (byte)1, exec("char x = 1; byte y = 1; return x+y;")); + // char/char + assertEquals((char)1 + (char)1, exec("char x = 1; char y = 1; return x+y;")); + // char/short + assertEquals((char)1 + (short)1, exec("char x = 1; short y = 1; return x+y;")); + // char/int + assertEquals((char)1 + 1, exec("char x = 1; int y = 1; return x+y;")); + // char/long + assertEquals((char)1 + 1L, exec("char x = 1; long y = 1; return x+y;")); + // char/float + assertEquals((char)1 + 1F, exec("char x = 1; float y = 1; return x+y;")); + // char/double + assertEquals((char)1 + 1.0, exec("char x = 1; double y = 1; return x+y;")); + + // short/byte + assertEquals((short)1 + (byte)1, exec("short x = 1; byte y = 1; return x+y;")); + // short/char + assertEquals((short)1 + (char)1, exec("short x = 1; char y = 1; return x+y;")); + // short/short + assertEquals((short)1 + (short)1, exec("short x = 1; short y = 1; return x+y;")); + // short/int + assertEquals((short)1 + 1, exec("short x = 1; int y = 1; return x+y;")); + // short/long + assertEquals((short)1 + 1L, exec("short x = 1; long y = 1; return x+y;")); + // short/float + assertEquals((short)1 + 1F, exec("short x = 1; float y = 1; return x+y;")); + // short/double + assertEquals((short)1 + 1.0, exec("short x = 1; double y = 1; return x+y;")); + + // int/byte + assertEquals(1 + (byte)1, exec("int x = 1; byte y = 1; return x+y;")); + // int/char + assertEquals(1 + (char)1, exec("int x = 1; char y = 1; return x+y;")); + // int/short + assertEquals(1 + (short)1, exec("int x = 1; short y = 1; return x+y;")); + // int/int + assertEquals(1 + 1, exec("int x = 1; int y = 1; return x+y;")); + 
// int/long + assertEquals(1 + 1L, exec("int x = 1; long y = 1; return x+y;")); + // int/float + assertEquals(1 + 1F, exec("int x = 1; float y = 1; return x+y;")); + // int/double + assertEquals(1 + 1.0, exec("int x = 1; double y = 1; return x+y;")); + + // long/byte + assertEquals(1L + (byte)1, exec("long x = 1; byte y = 1; return x+y;")); + // long/char + assertEquals(1L + (char)1, exec("long x = 1; char y = 1; return x+y;")); + // long/short + assertEquals(1L + (short)1, exec("long x = 1; short y = 1; return x+y;")); + // long/int + assertEquals(1L + 1, exec("long x = 1; int y = 1; return x+y;")); + // long/long + assertEquals(1L + 1L, exec("long x = 1; long y = 1; return x+y;")); + // long/float + assertEquals(1L + 1F, exec("long x = 1; float y = 1; return x+y;")); + // long/double + assertEquals(1L + 1.0, exec("long x = 1; double y = 1; return x+y;")); + + // float/byte + assertEquals(1F + (byte)1, exec("float x = 1; byte y = 1; return x+y;")); + // float/char + assertEquals(1F + (char)1, exec("float x = 1; char y = 1; return x+y;")); + // float/short + assertEquals(1F + (short)1, exec("float x = 1; short y = 1; return x+y;")); + // float/int + assertEquals(1F + 1, exec("float x = 1; int y = 1; return x+y;")); + // float/long + assertEquals(1F + 1L, exec("float x = 1; long y = 1; return x+y;")); + // float/float + assertEquals(1F + 1F, exec("float x = 1; float y = 1; return x+y;")); + // float/double + assertEquals(1F + 1.0, exec("float x = 1; double y = 1; return x+y;")); + + // double/byte + assertEquals(1.0 + (byte)1, exec("double x = 1; byte y = 1; return x+y;")); + // double/char + assertEquals(1.0 + (char)1, exec("double x = 1; char y = 1; return x+y;")); + // double/short + assertEquals(1.0 + (short)1, exec("double x = 1; short y = 1; return x+y;")); + // double/int + assertEquals(1.0 + 1, exec("double x = 1; int y = 1; return x+y;")); + // double/long + assertEquals(1.0 + 1L, exec("double x = 1; long y = 1; return x+y;")); + // double/float + 
assertEquals(1.0 + 1F, exec("double x = 1; float y = 1; return x+y;")); + // double/double + assertEquals(1.0 + 1.0, exec("double x = 1; double y = 1; return x+y;")); + } + + public void testBinaryPromotionConst() throws Exception { + // byte/byte + assertEquals((byte)1 + (byte)1, exec("return (byte)1 + (byte)1;")); + // byte/char + assertEquals((byte)1 + (char)1, exec("return (byte)1 + (char)1;")); + // byte/short + assertEquals((byte)1 + (short)1, exec("return (byte)1 + (short)1;")); + // byte/int + assertEquals((byte)1 + 1, exec("return (byte)1 + 1;")); + // byte/long + assertEquals((byte)1 + 1L, exec("return (byte)1 + 1L;")); + // byte/float + assertEquals((byte)1 + 1F, exec("return (byte)1 + 1F;")); + // byte/double + assertEquals((byte)1 + 1.0, exec("return (byte)1 + 1.0;")); + + // char/byte + assertEquals((char)1 + (byte)1, exec("return (char)1 + (byte)1;")); + // char/char + assertEquals((char)1 + (char)1, exec("return (char)1 + (char)1;")); + // char/short + assertEquals((char)1 + (short)1, exec("return (char)1 + (short)1;")); + // char/int + assertEquals((char)1 + 1, exec("return (char)1 + 1;")); + // char/long + assertEquals((char)1 + 1L, exec("return (char)1 + 1L;")); + // char/float + assertEquals((char)1 + 1F, exec("return (char)1 + 1F;")); + // char/double + assertEquals((char)1 + 1.0, exec("return (char)1 + 1.0;")); + + // short/byte + assertEquals((short)1 + (byte)1, exec("return (short)1 + (byte)1;")); + // short/char + assertEquals((short)1 + (char)1, exec("return (short)1 + (char)1;")); + // short/short + assertEquals((short)1 + (short)1, exec("return (short)1 + (short)1;")); + // short/int + assertEquals((short)1 + 1, exec("return (short)1 + 1;")); + // short/long + assertEquals((short)1 + 1L, exec("return (short)1 + 1L;")); + // short/float + assertEquals((short)1 + 1F, exec("return (short)1 + 1F;")); + // short/double + assertEquals((short)1 + 1.0, exec("return (short)1 + 1.0;")); + + // int/byte + assertEquals(1 + (byte)1, exec("return 1 + 
(byte)1;")); + // int/char + assertEquals(1 + (char)1, exec("return 1 + (char)1;")); + // int/short + assertEquals(1 + (short)1, exec("return 1 + (short)1;")); + // int/int + assertEquals(1 + 1, exec("return 1 + 1;")); + // int/long + assertEquals(1 + 1L, exec("return 1 + 1L;")); + // int/float + assertEquals(1 + 1F, exec("return 1 + 1F;")); + // int/double + assertEquals(1 + 1.0, exec("return 1 + 1.0;")); + + // long/byte + assertEquals(1L + (byte)1, exec("return 1L + (byte)1;")); + // long/char + assertEquals(1L + (char)1, exec("return 1L + (char)1;")); + // long/short + assertEquals(1L + (short)1, exec("return 1L + (short)1;")); + // long/int + assertEquals(1L + 1, exec("return 1L + 1;")); + // long/long + assertEquals(1L + 1L, exec("return 1L + 1L;")); + // long/float + assertEquals(1L + 1F, exec("return 1L + 1F;")); + // long/double + assertEquals(1L + 1.0, exec("return 1L + 1.0;")); + + // float/byte + assertEquals(1F + (byte)1, exec("return 1F + (byte)1;")); + // float/char + assertEquals(1F + (char)1, exec("return 1F + (char)1;")); + // float/short + assertEquals(1F + (short)1, exec("return 1F + (short)1;")); + // float/int + assertEquals(1F + 1, exec("return 1F + 1;")); + // float/long + assertEquals(1F + 1L, exec("return 1F + 1L;")); + // float/float + assertEquals(1F + 1F, exec("return 1F + 1F;")); + // float/double + assertEquals(1F + 1.0, exec("return 1F + 1.0;")); + + // double/byte + assertEquals(1.0 + (byte)1, exec("return 1.0 + (byte)1;")); + // double/char + assertEquals(1.0 + (char)1, exec("return 1.0 + (char)1;")); + // double/short + assertEquals(1.0 + (short)1, exec("return 1.0 + (short)1;")); + // double/int + assertEquals(1.0 + 1, exec("return 1.0 + 1;")); + // double/long + assertEquals(1.0 + 1L, exec("return 1.0 + 1L;")); + // double/float + assertEquals(1.0 + 1F, exec("return 1.0 + 1F;")); + // double/double + assertEquals(1.0 + 1.0, exec("return 1.0 + 1.0;")); + } +} diff --git 
a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/CompoundAssignmentTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/CompoundAssignmentTests.java new file mode 100644 index 00000000000..3af440ad02c --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/CompoundAssignmentTests.java @@ -0,0 +1,319 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plan.a; + +/** + * Tests compound assignments (+=, etc) across all data types + */ +public class CompoundAssignmentTests extends ScriptTestCase { + public void testAddition() { + // byte + assertEquals((byte) 15, exec("byte x = 5; x += 10; return x;")); + assertEquals((byte) -5, exec("byte x = 5; x += -10; return x;")); + + // short + assertEquals((short) 15, exec("short x = 5; x += 10; return x;")); + assertEquals((short) -5, exec("short x = 5; x += -10; return x;")); + // char + assertEquals((char) 15, exec("char x = 5; x += 10; return x;")); + assertEquals((char) 5, exec("char x = 10; x += -5; return x;")); + // int + assertEquals(15, exec("int x = 5; x += 10; return x;")); + assertEquals(-5, exec("int x = 5; x += -10; return x;")); + // long + assertEquals(15L, exec("long x = 5; x += 10; return x;")); + assertEquals(-5L, exec("long x = 5; x += -10; return x;")); + // float + assertEquals(15F, exec("float x = 5f; x += 10; return x;")); + assertEquals(-5F, exec("float x = 5f; x += -10; return x;")); + // double + assertEquals(15D, exec("double x = 5.0; x += 10; return x;")); + assertEquals(-5D, exec("double x = 5.0; x += -10; return x;")); + } + + public void testSubtraction() { + // byte + assertEquals((byte) 15, exec("byte x = 5; x -= -10; return x;")); + assertEquals((byte) -5, exec("byte x = 5; x -= 10; return x;")); + // short + assertEquals((short) 15, exec("short x = 5; x -= -10; return x;")); + assertEquals((short) -5, exec("short x = 5; x -= 10; return x;")); + // char + assertEquals((char) 15, exec("char x = 5; x -= -10; return x;")); + assertEquals((char) 5, exec("char x = 10; x -= 5; return x;")); + // int + assertEquals(15, exec("int x = 5; x -= -10; return x;")); + assertEquals(-5, exec("int x = 5; x -= 10; return x;")); + // long + assertEquals(15L, exec("long x = 5; x -= -10; return x;")); + assertEquals(-5L, exec("long x = 5; x -= 10; return x;")); + // float + assertEquals(15F, exec("float x = 5f; x -= -10; 
return x;")); + assertEquals(-5F, exec("float x = 5f; x -= 10; return x;")); + // double + assertEquals(15D, exec("double x = 5.0; x -= -10; return x;")); + assertEquals(-5D, exec("double x = 5.0; x -= 10; return x;")); + } + + public void testMultiplication() { + // byte + assertEquals((byte) 15, exec("byte x = 5; x *= 3; return x;")); + assertEquals((byte) -5, exec("byte x = 5; x *= -1; return x;")); + // short + assertEquals((short) 15, exec("short x = 5; x *= 3; return x;")); + assertEquals((short) -5, exec("short x = 5; x *= -1; return x;")); + // char + assertEquals((char) 15, exec("char x = 5; x *= 3; return x;")); + // int + assertEquals(15, exec("int x = 5; x *= 3; return x;")); + assertEquals(-5, exec("int x = 5; x *= -1; return x;")); + // long + assertEquals(15L, exec("long x = 5; x *= 3; return x;")); + assertEquals(-5L, exec("long x = 5; x *= -1; return x;")); + // float + assertEquals(15F, exec("float x = 5f; x *= 3; return x;")); + assertEquals(-5F, exec("float x = 5f; x *= -1; return x;")); + // double + assertEquals(15D, exec("double x = 5.0; x *= 3; return x;")); + assertEquals(-5D, exec("double x = 5.0; x *= -1; return x;")); + } + + public void testDivision() { + // byte + assertEquals((byte) 15, exec("byte x = 45; x /= 3; return x;")); + assertEquals((byte) -5, exec("byte x = 5; x /= -1; return x;")); + // short + assertEquals((short) 15, exec("short x = 45; x /= 3; return x;")); + assertEquals((short) -5, exec("short x = 5; x /= -1; return x;")); + // char + assertEquals((char) 15, exec("char x = 45; x /= 3; return x;")); + // int + assertEquals(15, exec("int x = 45; x /= 3; return x;")); + assertEquals(-5, exec("int x = 5; x /= -1; return x;")); + // long + assertEquals(15L, exec("long x = 45; x /= 3; return x;")); + assertEquals(-5L, exec("long x = 5; x /= -1; return x;")); + // float + assertEquals(15F, exec("float x = 45f; x /= 3; return x;")); + assertEquals(-5F, exec("float x = 5f; x /= -1; return x;")); + // double + assertEquals(15D, 
exec("double x = 45.0; x /= 3; return x;")); + assertEquals(-5D, exec("double x = 5.0; x /= -1; return x;")); + } + + public void testDivisionByZero() { + // byte + try { + exec("byte x = 1; x /= 0; return x;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + + // short + try { + exec("short x = 1; x /= 0; return x;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + + // char + try { + exec("char x = 1; x /= 0; return x;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + + // int + try { + exec("int x = 1; x /= 0; return x;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + + // long + try { + exec("long x = 1; x /= 0; return x;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + } + + public void testRemainder() { + // byte + assertEquals((byte) 3, exec("byte x = 15; x %= 4; return x;")); + assertEquals((byte) -3, exec("byte x = (byte) -15; x %= 4; return x;")); + // short + assertEquals((short) 3, exec("short x = 15; x %= 4; return x;")); + assertEquals((short) -3, exec("short x = (short) -15; x %= 4; return x;")); + // char + assertEquals((char) 3, exec("char x = (char) 15; x %= 4; return x;")); + // int + assertEquals(3, exec("int x = 15; x %= 4; return x;")); + assertEquals(-3, exec("int x = -15; x %= 4; return x;")); + // long + assertEquals(3L, exec("long x = 15L; x %= 4; return x;")); + assertEquals(-3L, exec("long x = -15L; x %= 4; return x;")); + // float + assertEquals(3F, exec("float x = 15F; x %= 4; return x;")); + assertEquals(-3F, exec("float x = -15F; x %= 4; return x;")); + // double + assertEquals(3D, exec("double x = 15.0; x %= 4; return x;")); + assertEquals(-3D, exec("double x = -15.0; x %= 4; return x;")); + } + + public void testLeftShift() { + // byte + assertEquals((byte) 60, exec("byte x = 15; x <<= 2; return x;")); + assertEquals((byte) -60, exec("byte x = (byte) -15; 
x <<= 2; return x;")); + // short + assertEquals((short) 60, exec("short x = 15; x <<= 2; return x;")); + assertEquals((short) -60, exec("short x = (short) -15; x <<= 2; return x;")); + // char + assertEquals((char) 60, exec("char x = (char) 15; x <<= 2; return x;")); + // int + assertEquals(60, exec("int x = 15; x <<= 2; return x;")); + assertEquals(-60, exec("int x = -15; x <<= 2; return x;")); + // long + assertEquals(60L, exec("long x = 15L; x <<= 2; return x;")); + assertEquals(-60L, exec("long x = -15L; x <<= 2; return x;")); + } + + public void testRightShift() { + // byte + assertEquals((byte) 15, exec("byte x = 60; x >>= 2; return x;")); + assertEquals((byte) -15, exec("byte x = (byte) -60; x >>= 2; return x;")); + // short + assertEquals((short) 15, exec("short x = 60; x >>= 2; return x;")); + assertEquals((short) -15, exec("short x = (short) -60; x >>= 2; return x;")); + // char + assertEquals((char) 15, exec("char x = (char) 60; x >>= 2; return x;")); + // int + assertEquals(15, exec("int x = 60; x >>= 2; return x;")); + assertEquals(-15, exec("int x = -60; x >>= 2; return x;")); + // long + assertEquals(15L, exec("long x = 60L; x >>= 2; return x;")); + assertEquals(-15L, exec("long x = -60L; x >>= 2; return x;")); + } + + public void testUnsignedRightShift() { + // byte + assertEquals((byte) 15, exec("byte x = 60; x >>>= 2; return x;")); + assertEquals((byte) -15, exec("byte x = (byte) -60; x >>>= 2; return x;")); + // short + assertEquals((short) 15, exec("short x = 60; x >>>= 2; return x;")); + assertEquals((short) -15, exec("short x = (short) -60; x >>>= 2; return x;")); + // char + assertEquals((char) 15, exec("char x = (char) 60; x >>>= 2; return x;")); + // int + assertEquals(15, exec("int x = 60; x >>>= 2; return x;")); + assertEquals(-60 >>> 2, exec("int x = -60; x >>>= 2; return x;")); + // long + assertEquals(15L, exec("long x = 60L; x >>>= 2; return x;")); + assertEquals(-60L >>> 2, exec("long x = -60L; x >>>= 2; return x;")); + } + + public 
void testAnd() { + // boolean + assertEquals(true, exec("boolean x = true; x &= true; return x;")); + assertEquals(false, exec("boolean x = true; x &= false; return x;")); + assertEquals(false, exec("boolean x = false; x &= true; return x;")); + assertEquals(false, exec("boolean x = false; x &= false; return x;")); + assertEquals(true, exec("Boolean x = true; x &= true; return x;")); + assertEquals(false, exec("Boolean x = true; x &= false; return x;")); + assertEquals(false, exec("Boolean x = false; x &= true; return x;")); + assertEquals(false, exec("Boolean x = false; x &= false; return x;")); + assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] &= true; return x[0];")); + assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] &= false; return x[0];")); + assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] &= true; return x[0];")); + assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] &= false; return x[0];")); + assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] &= true; return x[0];")); + assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] &= false; return x[0];")); + assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] &= true; return x[0];")); + assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] &= false; return x[0];")); + + // byte + assertEquals((byte) (13 & 14), exec("byte x = 13; x &= 14; return x;")); + // short + assertEquals((short) (13 & 14), exec("short x = 13; x &= 14; return x;")); + // char + assertEquals((char) (13 & 14), exec("char x = 13; x &= 14; return x;")); + // int + assertEquals(13 & 14, exec("int x = 13; x &= 14; return x;")); + // long + assertEquals((long) (13 & 14), exec("long x = 13L; x &= 14; return x;")); + } + + public void testOr() { + // boolean + assertEquals(true, exec("boolean x = true; x |= true; return x;")); + assertEquals(true, exec("boolean x = 
true; x |= false; return x;")); + assertEquals(true, exec("boolean x = false; x |= true; return x;")); + assertEquals(false, exec("boolean x = false; x |= false; return x;")); + assertEquals(true, exec("Boolean x = true; x |= true; return x;")); + assertEquals(true, exec("Boolean x = true; x |= false; return x;")); + assertEquals(true, exec("Boolean x = false; x |= true; return x;")); + assertEquals(false, exec("Boolean x = false; x |= false; return x;")); + assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] |= true; return x[0];")); + assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] |= false; return x[0];")); + assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] |= true; return x[0];")); + assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] |= false; return x[0];")); + assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] |= true; return x[0];")); + assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] |= false; return x[0];")); + assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] |= true; return x[0];")); + assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] |= false; return x[0];")); + + // byte + assertEquals((byte) (13 | 14), exec("byte x = 13; x |= 14; return x;")); + // short + assertEquals((short) (13 | 14), exec("short x = 13; x |= 14; return x;")); + // char + assertEquals((char) (13 | 14), exec("char x = 13; x |= 14; return x;")); + // int + assertEquals(13 | 14, exec("int x = 13; x |= 14; return x;")); + // long + assertEquals((long) (13 | 14), exec("long x = 13L; x |= 14; return x;")); + } + + public void testXor() { + // boolean + assertEquals(false, exec("boolean x = true; x ^= true; return x;")); + assertEquals(true, exec("boolean x = true; x ^= false; return x;")); + assertEquals(true, exec("boolean x = false; x ^= true; return x;")); + assertEquals(false, exec("boolean x = 
false; x ^= false; return x;")); + assertEquals(false, exec("Boolean x = true; x ^= true; return x;")); + assertEquals(true, exec("Boolean x = true; x ^= false; return x;")); + assertEquals(true, exec("Boolean x = false; x ^= true; return x;")); + assertEquals(false, exec("Boolean x = false; x ^= false; return x;")); + assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] ^= true; return x[0];")); + assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] ^= false; return x[0];")); + assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] ^= true; return x[0];")); + assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] ^= false; return x[0];")); + assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] ^= true; return x[0];")); + assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] ^= false; return x[0];")); + assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] ^= true; return x[0];")); + assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] ^= false; return x[0];")); + + // byte + assertEquals((byte) (13 ^ 14), exec("byte x = 13; x ^= 14; return x;")); + // short + assertEquals((short) (13 ^ 14), exec("short x = 13; x ^= 14; return x;")); + // char + assertEquals((char) (13 ^ 14), exec("char x = 13; x ^= 14; return x;")); + // int + assertEquals(13 ^ 14, exec("int x = 13; x ^= 14; return x;")); + // long + assertEquals((long) (13 ^ 14), exec("long x = 13L; x ^= 14; return x;")); + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ConditionalTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ConditionalTests.java new file mode 100644 index 00000000000..bc466427da7 --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ConditionalTests.java @@ -0,0 +1,93 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license 
agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plan.a; + +import java.util.ArrayList; +import java.util.HashMap; + +public class ConditionalTests extends ScriptTestCase { + public void testBasic() { + assertEquals(2, exec("boolean x = true; return x ? 2 : 3;")); + assertEquals(3, exec("boolean x = false; return x ? 2 : 3;")); + assertEquals(3, exec("boolean x = false, y = true; return x && y ? 2 : 3;")); + assertEquals(2, exec("boolean x = true, y = true; return x && y ? 2 : 3;")); + assertEquals(2, exec("boolean x = true, y = false; return x || y ? 2 : 3;")); + assertEquals(3, exec("boolean x = false, y = false; return x || y ? 2 : 3;")); + } + + public void testPrecedence() { + assertEquals(4, exec("boolean x = false, y = true; return x ? (y ? 2 : 3) : 4;")); + assertEquals(2, exec("boolean x = true, y = true; return x ? (y ? 2 : 3) : 4;")); + assertEquals(3, exec("boolean x = true, y = false; return x ? (y ? 2 : 3) : 4;")); + assertEquals(2, exec("boolean x = true, y = true; return x ? y ? 2 : 3 : 4;")); + assertEquals(4, exec("boolean x = false, y = true; return x ? y ? 2 : 3 : 4;")); + assertEquals(3, exec("boolean x = true, y = false; return x ? y ? 2 : 3 : 4;")); + assertEquals(3, exec("boolean x = false, y = true; return x ? 2 : y ? 
3 : 4;")); + assertEquals(2, exec("boolean x = true, y = false; return x ? 2 : y ? 3 : 4;")); + assertEquals(4, exec("boolean x = false, y = false; return x ? 2 : y ? 3 : 4;")); + assertEquals(4, exec("boolean x = false, y = false; return (x ? true : y) ? 3 : 4;")); + assertEquals(4, exec("boolean x = true, y = false; return (x ? false : y) ? 3 : 4;")); + assertEquals(3, exec("boolean x = false, y = true; return (x ? false : y) ? 3 : 4;")); + assertEquals(2, exec("boolean x = true, y = false; return (x ? false : y) ? (x ? 3 : 4) : x ? 2 : 1;")); + assertEquals(2, exec("boolean x = true, y = false; return (x ? false : y) ? x ? 3 : 4 : x ? 2 : 1;")); + assertEquals(4, exec("boolean x = false, y = true; return x ? false : y ? x ? 3 : 4 : x ? 2 : 1;")); + } + + public void testAssignment() { + assertEquals(4D, exec("boolean x = false; double z = x ? 2 : 4.0F; return z;")); + assertEquals((byte)7, exec("boolean x = false; int y = 2; byte z = x ? (byte)y : 7; return z;")); + assertEquals((byte)7, exec("boolean x = false; int y = 2; byte z = (byte)(x ? y : 7); return z;")); + assertEquals(ArrayList.class, exec("boolean x = false; Object z = x ? new HashMap() : new ArrayList(); return z;").getClass()); + } + + public void testNullArguments() { + assertEquals(null, exec("boolean b = false, c = true; Object x; Map y; return b && c ? x : y;")); + assertEquals(HashMap.class, exec("boolean b = false, c = true; Object x; Map y = new HashMap(); return b && c ? x : y;").getClass()); + } + + public void testPromotion() { + assertEquals(false, exec("boolean x = false; boolean y = true; return (x ? 2 : 4.0F) == (y ? 2 : 4.0F);")); + assertEquals(false, exec("boolean x = false; boolean y = true; return (x ? 2 : 4.0F) == (y ? new Long(2) : new Float(4.0F));")); + assertEquals(false, exec("boolean x = false; boolean y = true; return (x ? new HashMap() : new ArrayList()) == (y ? 
new Long(2) : new Float(4.0F));")); + assertEquals(false, exec("boolean x = false; boolean y = true; return (x ? 2 : 4.0F) == (y ? new HashMap() : new ArrayList());")); + } + + public void testIncompatibleAssignment() { + try { + exec("boolean x = false; byte z = x ? 2 : 4.0F; return z;"); + fail("expected class cast exception"); + } catch (ClassCastException expected) {} + + try { + exec("boolean x = false; Map z = x ? 4 : (byte)7; return z;"); + fail("expected class cast exception"); + } catch (ClassCastException expected) {} + + try { + exec("boolean x = false; Map z = x ? new HashMap() : new ArrayList(); return z;"); + fail("expected class cast exception"); + } catch (ClassCastException expected) {} + + try { + exec("boolean x = false; int y = 2; byte z = x ? y : 7; return z;"); + fail("expected class cast exception"); + } catch (ClassCastException expected) {} + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/DefTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/DefTests.java new file mode 100644 index 00000000000..6ff51131fe5 --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/DefTests.java @@ -0,0 +1,914 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plan.a; + +public class DefTests extends ScriptTestCase { + public void testNot() { + assertEquals(~1, exec("def x = (byte)1 return ~x")); + assertEquals(~1, exec("def x = (short)1 return ~x")); + assertEquals(~1, exec("def x = (char)1 return ~x")); + assertEquals(~1, exec("def x = 1 return ~x")); + assertEquals(~1L, exec("def x = 1L return ~x")); + } + + public void testNeg() { + assertEquals(-1, exec("def x = (byte)1 return -x")); + assertEquals(-1, exec("def x = (short)1 return -x")); + assertEquals(-1, exec("def x = (char)1 return -x")); + assertEquals(-1, exec("def x = 1 return -x")); + assertEquals(-1L, exec("def x = 1L return -x")); + assertEquals(-1.0F, exec("def x = 1F return -x")); + assertEquals(-1.0, exec("def x = 1.0 return -x")); + } + + public void testMul() { + assertEquals(4, exec("def x = (byte)2 def y = (byte)2 return x * y")); + assertEquals(4, exec("def x = (short)2 def y = (byte)2 return x * y")); + assertEquals(4, exec("def x = (char)2 def y = (byte)2 return x * y")); + assertEquals(4, exec("def x = (int)2 def y = (byte)2 return x * y")); + assertEquals(4L, exec("def x = (long)2 def y = (byte)2 return x * y")); + assertEquals(4F, exec("def x = (float)2 def y = (byte)2 return x * y")); + assertEquals(4D, exec("def x = (double)2 def y = (byte)2 return x * y")); + + assertEquals(4, exec("def x = (byte)2 def y = (short)2 return x * y")); + assertEquals(4, exec("def x = (short)2 def y = (short)2 return x * y")); + assertEquals(4, exec("def x = (char)2 def y = (short)2 return x * y")); + assertEquals(4, exec("def x = (int)2 def y = (short)2 return x * y")); + assertEquals(4L, exec("def x = (long)2 def y = (short)2 return x * y")); + assertEquals(4F, exec("def x = (float)2 def y = (short)2 return x * y")); + assertEquals(4D, exec("def x = (double)2 def y = (short)2 return x * y")); + + assertEquals(4, exec("def 
x = (byte)2 def y = (char)2 return x * y")); + assertEquals(4, exec("def x = (short)2 def y = (char)2 return x * y")); + assertEquals(4, exec("def x = (char)2 def y = (char)2 return x * y")); + assertEquals(4, exec("def x = (int)2 def y = (char)2 return x * y")); + assertEquals(4L, exec("def x = (long)2 def y = (char)2 return x * y")); + assertEquals(4F, exec("def x = (float)2 def y = (char)2 return x * y")); + assertEquals(4D, exec("def x = (double)2 def y = (char)2 return x * y")); + + assertEquals(4, exec("def x = (byte)2 def y = (int)2 return x * y")); + assertEquals(4, exec("def x = (short)2 def y = (int)2 return x * y")); + assertEquals(4, exec("def x = (char)2 def y = (int)2 return x * y")); + assertEquals(4, exec("def x = (int)2 def y = (int)2 return x * y")); + assertEquals(4L, exec("def x = (long)2 def y = (int)2 return x * y")); + assertEquals(4F, exec("def x = (float)2 def y = (int)2 return x * y")); + assertEquals(4D, exec("def x = (double)2 def y = (int)2 return x * y")); + + assertEquals(4L, exec("def x = (byte)2 def y = (long)2 return x * y")); + assertEquals(4L, exec("def x = (short)2 def y = (long)2 return x * y")); + assertEquals(4L, exec("def x = (char)2 def y = (long)2 return x * y")); + assertEquals(4L, exec("def x = (int)2 def y = (long)2 return x * y")); + assertEquals(4L, exec("def x = (long)2 def y = (long)2 return x * y")); + assertEquals(4F, exec("def x = (float)2 def y = (long)2 return x * y")); + assertEquals(4D, exec("def x = (double)2 def y = (long)2 return x * y")); + + assertEquals(4F, exec("def x = (byte)2 def y = (float)2 return x * y")); + assertEquals(4F, exec("def x = (short)2 def y = (float)2 return x * y")); + assertEquals(4F, exec("def x = (char)2 def y = (float)2 return x * y")); + assertEquals(4F, exec("def x = (int)2 def y = (float)2 return x * y")); + assertEquals(4F, exec("def x = (long)2 def y = (float)2 return x * y")); + assertEquals(4F, exec("def x = (float)2 def y = (float)2 return x * y")); + assertEquals(4D, 
exec("def x = (double)2 def y = (float)2 return x * y")); + + assertEquals(4D, exec("def x = (byte)2 def y = (double)2 return x * y")); + assertEquals(4D, exec("def x = (short)2 def y = (double)2 return x * y")); + assertEquals(4D, exec("def x = (char)2 def y = (double)2 return x * y")); + assertEquals(4D, exec("def x = (int)2 def y = (double)2 return x * y")); + assertEquals(4D, exec("def x = (long)2 def y = (double)2 return x * y")); + assertEquals(4D, exec("def x = (float)2 def y = (double)2 return x * y")); + assertEquals(4D, exec("def x = (double)2 def y = (double)2 return x * y")); + + assertEquals(4, exec("def x = (Byte)2 def y = (byte)2 return x * y")); + assertEquals(4, exec("def x = (Short)2 def y = (short)2 return x * y")); + assertEquals(4, exec("def x = (Character)2 def y = (char)2 return x * y")); + assertEquals(4, exec("def x = (Integer)2 def y = (int)2 return x * y")); + assertEquals(4L, exec("def x = (Long)2 def y = (long)2 return x * y")); + assertEquals(4F, exec("def x = (Float)2 def y = (float)2 return x * y")); + assertEquals(4D, exec("def x = (Double)2 def y = (double)2 return x * y")); + } + + public void testDiv() { + assertEquals(1, exec("def x = (byte)2 def y = (byte)2 return x / y")); + assertEquals(1, exec("def x = (short)2 def y = (byte)2 return x / y")); + assertEquals(1, exec("def x = (char)2 def y = (byte)2 return x / y")); + assertEquals(1, exec("def x = (int)2 def y = (byte)2 return x / y")); + assertEquals(1L, exec("def x = (long)2 def y = (byte)2 return x / y")); + assertEquals(1F, exec("def x = (float)2 def y = (byte)2 return x / y")); + assertEquals(1D, exec("def x = (double)2 def y = (byte)2 return x / y")); + + assertEquals(1, exec("def x = (byte)2 def y = (short)2 return x / y")); + assertEquals(1, exec("def x = (short)2 def y = (short)2 return x / y")); + assertEquals(1, exec("def x = (char)2 def y = (short)2 return x / y")); + assertEquals(1, exec("def x = (int)2 def y = (short)2 return x / y")); + assertEquals(1L, 
exec("def x = (long)2 def y = (short)2 return x / y")); + assertEquals(1F, exec("def x = (float)2 def y = (short)2 return x / y")); + assertEquals(1D, exec("def x = (double)2 def y = (short)2 return x / y")); + + assertEquals(1, exec("def x = (byte)2 def y = (char)2 return x / y")); + assertEquals(1, exec("def x = (short)2 def y = (char)2 return x / y")); + assertEquals(1, exec("def x = (char)2 def y = (char)2 return x / y")); + assertEquals(1, exec("def x = (int)2 def y = (char)2 return x / y")); + assertEquals(1L, exec("def x = (long)2 def y = (char)2 return x / y")); + assertEquals(1F, exec("def x = (float)2 def y = (char)2 return x / y")); + assertEquals(1D, exec("def x = (double)2 def y = (char)2 return x / y")); + + assertEquals(1, exec("def x = (byte)2 def y = (int)2 return x / y")); + assertEquals(1, exec("def x = (short)2 def y = (int)2 return x / y")); + assertEquals(1, exec("def x = (char)2 def y = (int)2 return x / y")); + assertEquals(1, exec("def x = (int)2 def y = (int)2 return x / y")); + assertEquals(1L, exec("def x = (long)2 def y = (int)2 return x / y")); + assertEquals(1F, exec("def x = (float)2 def y = (int)2 return x / y")); + assertEquals(1D, exec("def x = (double)2 def y = (int)2 return x / y")); + + assertEquals(1L, exec("def x = (byte)2 def y = (long)2 return x / y")); + assertEquals(1L, exec("def x = (short)2 def y = (long)2 return x / y")); + assertEquals(1L, exec("def x = (char)2 def y = (long)2 return x / y")); + assertEquals(1L, exec("def x = (int)2 def y = (long)2 return x / y")); + assertEquals(1L, exec("def x = (long)2 def y = (long)2 return x / y")); + assertEquals(1F, exec("def x = (float)2 def y = (long)2 return x / y")); + assertEquals(1D, exec("def x = (double)2 def y = (long)2 return x / y")); + + assertEquals(1F, exec("def x = (byte)2 def y = (float)2 return x / y")); + assertEquals(1F, exec("def x = (short)2 def y = (float)2 return x / y")); + assertEquals(1F, exec("def x = (char)2 def y = (float)2 return x / y")); + 
assertEquals(1F, exec("def x = (int)2 def y = (float)2 return x / y")); + assertEquals(1F, exec("def x = (long)2 def y = (float)2 return x / y")); + assertEquals(1F, exec("def x = (float)2 def y = (float)2 return x / y")); + assertEquals(1D, exec("def x = (double)2 def y = (float)2 return x / y")); + + assertEquals(1D, exec("def x = (byte)2 def y = (double)2 return x / y")); + assertEquals(1D, exec("def x = (short)2 def y = (double)2 return x / y")); + assertEquals(1D, exec("def x = (char)2 def y = (double)2 return x / y")); + assertEquals(1D, exec("def x = (int)2 def y = (double)2 return x / y")); + assertEquals(1D, exec("def x = (long)2 def y = (double)2 return x / y")); + assertEquals(1D, exec("def x = (float)2 def y = (double)2 return x / y")); + assertEquals(1D, exec("def x = (double)2 def y = (double)2 return x / y")); + + assertEquals(1, exec("def x = (Byte)2 def y = (byte)2 return x / y")); + assertEquals(1, exec("def x = (Short)2 def y = (short)2 return x / y")); + assertEquals(1, exec("def x = (Character)2 def y = (char)2 return x / y")); + assertEquals(1, exec("def x = (Integer)2 def y = (int)2 return x / y")); + assertEquals(1L, exec("def x = (Long)2 def y = (long)2 return x / y")); + assertEquals(1F, exec("def x = (Float)2 def y = (float)2 return x / y")); + assertEquals(1D, exec("def x = (Double)2 def y = (double)2 return x / y")); + } + + public void testRem() { + assertEquals(0, exec("def x = (byte)2 def y = (byte)2 return x % y")); + assertEquals(0, exec("def x = (short)2 def y = (byte)2 return x % y")); + assertEquals(0, exec("def x = (char)2 def y = (byte)2 return x % y")); + assertEquals(0, exec("def x = (int)2 def y = (byte)2 return x % y")); + assertEquals(0L, exec("def x = (long)2 def y = (byte)2 return x % y")); + assertEquals(0F, exec("def x = (float)2 def y = (byte)2 return x % y")); + assertEquals(0D, exec("def x = (double)2 def y = (byte)2 return x % y")); + + assertEquals(0, exec("def x = (byte)2 def y = (short)2 return x % y")); + 
assertEquals(0, exec("def x = (short)2 def y = (short)2 return x % y")); + assertEquals(0, exec("def x = (char)2 def y = (short)2 return x % y")); + assertEquals(0, exec("def x = (int)2 def y = (short)2 return x % y")); + assertEquals(0L, exec("def x = (long)2 def y = (short)2 return x % y")); + assertEquals(0F, exec("def x = (float)2 def y = (short)2 return x % y")); + assertEquals(0D, exec("def x = (double)2 def y = (short)2 return x % y")); + + assertEquals(0, exec("def x = (byte)2 def y = (char)2 return x % y")); + assertEquals(0, exec("def x = (short)2 def y = (char)2 return x % y")); + assertEquals(0, exec("def x = (char)2 def y = (char)2 return x % y")); + assertEquals(0, exec("def x = (int)2 def y = (char)2 return x % y")); + assertEquals(0L, exec("def x = (long)2 def y = (char)2 return x % y")); + assertEquals(0F, exec("def x = (float)2 def y = (char)2 return x % y")); + assertEquals(0D, exec("def x = (double)2 def y = (char)2 return x % y")); + + assertEquals(0, exec("def x = (byte)2 def y = (int)2 return x % y")); + assertEquals(0, exec("def x = (short)2 def y = (int)2 return x % y")); + assertEquals(0, exec("def x = (char)2 def y = (int)2 return x % y")); + assertEquals(0, exec("def x = (int)2 def y = (int)2 return x % y")); + assertEquals(0L, exec("def x = (long)2 def y = (int)2 return x % y")); + assertEquals(0F, exec("def x = (float)2 def y = (int)2 return x % y")); + assertEquals(0D, exec("def x = (double)2 def y = (int)2 return x % y")); + + assertEquals(0L, exec("def x = (byte)2 def y = (long)2 return x % y")); + assertEquals(0L, exec("def x = (short)2 def y = (long)2 return x % y")); + assertEquals(0L, exec("def x = (char)2 def y = (long)2 return x % y")); + assertEquals(0L, exec("def x = (int)2 def y = (long)2 return x % y")); + assertEquals(0L, exec("def x = (long)2 def y = (long)2 return x % y")); + assertEquals(0F, exec("def x = (float)2 def y = (long)2 return x % y")); + assertEquals(0D, exec("def x = (double)2 def y = (long)2 return x % 
y")); + + assertEquals(0F, exec("def x = (byte)2 def y = (float)2 return x % y")); + assertEquals(0F, exec("def x = (short)2 def y = (float)2 return x % y")); + assertEquals(0F, exec("def x = (char)2 def y = (float)2 return x % y")); + assertEquals(0F, exec("def x = (int)2 def y = (float)2 return x % y")); + assertEquals(0F, exec("def x = (long)2 def y = (float)2 return x % y")); + assertEquals(0F, exec("def x = (float)2 def y = (float)2 return x % y")); + assertEquals(0D, exec("def x = (double)2 def y = (float)2 return x % y")); + + assertEquals(0D, exec("def x = (byte)2 def y = (double)2 return x % y")); + assertEquals(0D, exec("def x = (short)2 def y = (double)2 return x % y")); + assertEquals(0D, exec("def x = (char)2 def y = (double)2 return x % y")); + assertEquals(0D, exec("def x = (int)2 def y = (double)2 return x % y")); + assertEquals(0D, exec("def x = (long)2 def y = (double)2 return x % y")); + assertEquals(0D, exec("def x = (float)2 def y = (double)2 return x % y")); + assertEquals(0D, exec("def x = (double)2 def y = (double)2 return x % y")); + + assertEquals(0, exec("def x = (Byte)2 def y = (byte)2 return x % y")); + assertEquals(0, exec("def x = (Short)2 def y = (short)2 return x % y")); + assertEquals(0, exec("def x = (Character)2 def y = (char)2 return x % y")); + assertEquals(0, exec("def x = (Integer)2 def y = (int)2 return x % y")); + assertEquals(0L, exec("def x = (Long)2 def y = (long)2 return x % y")); + assertEquals(0F, exec("def x = (Float)2 def y = (float)2 return x % y")); + assertEquals(0D, exec("def x = (Double)2 def y = (double)2 return x % y")); + } + + public void testAdd() { + assertEquals(2, exec("def x = (byte)1 def y = (byte)1 return x + y")); + assertEquals(2, exec("def x = (short)1 def y = (byte)1 return x + y")); + assertEquals(2, exec("def x = (char)1 def y = (byte)1 return x + y")); + assertEquals(2, exec("def x = (int)1 def y = (byte)1 return x + y")); + assertEquals(2L, exec("def x = (long)1 def y = (byte)1 return x + 
y")); + assertEquals(2F, exec("def x = (float)1 def y = (byte)1 return x + y")); + assertEquals(2D, exec("def x = (double)1 def y = (byte)1 return x + y")); + + assertEquals(2, exec("def x = (byte)1 def y = (short)1 return x + y")); + assertEquals(2, exec("def x = (short)1 def y = (short)1 return x + y")); + assertEquals(2, exec("def x = (char)1 def y = (short)1 return x + y")); + assertEquals(2, exec("def x = (int)1 def y = (short)1 return x + y")); + assertEquals(2L, exec("def x = (long)1 def y = (short)1 return x + y")); + assertEquals(2F, exec("def x = (float)1 def y = (short)1 return x + y")); + assertEquals(2D, exec("def x = (double)1 def y = (short)1 return x + y")); + + assertEquals(2, exec("def x = (byte)1 def y = (char)1 return x + y")); + assertEquals(2, exec("def x = (short)1 def y = (char)1 return x + y")); + assertEquals(2, exec("def x = (char)1 def y = (char)1 return x + y")); + assertEquals(2, exec("def x = (int)1 def y = (char)1 return x + y")); + assertEquals(2L, exec("def x = (long)1 def y = (char)1 return x + y")); + assertEquals(2F, exec("def x = (float)1 def y = (char)1 return x + y")); + assertEquals(2D, exec("def x = (double)1 def y = (char)1 return x + y")); + + assertEquals(2, exec("def x = (byte)1 def y = (int)1 return x + y")); + assertEquals(2, exec("def x = (short)1 def y = (int)1 return x + y")); + assertEquals(2, exec("def x = (char)1 def y = (int)1 return x + y")); + assertEquals(2, exec("def x = (int)1 def y = (int)1 return x + y")); + assertEquals(2L, exec("def x = (long)1 def y = (int)1 return x + y")); + assertEquals(2F, exec("def x = (float)1 def y = (int)1 return x + y")); + assertEquals(2D, exec("def x = (double)1 def y = (int)1 return x + y")); + + assertEquals(2L, exec("def x = (byte)1 def y = (long)1 return x + y")); + assertEquals(2L, exec("def x = (short)1 def y = (long)1 return x + y")); + assertEquals(2L, exec("def x = (char)1 def y = (long)1 return x + y")); + assertEquals(2L, exec("def x = (int)1 def y = (long)1 
return x + y")); + assertEquals(2L, exec("def x = (long)1 def y = (long)1 return x + y")); + assertEquals(2F, exec("def x = (float)1 def y = (long)1 return x + y")); + assertEquals(2D, exec("def x = (double)1 def y = (long)1 return x + y")); + + assertEquals(2F, exec("def x = (byte)1 def y = (float)1 return x + y")); + assertEquals(2F, exec("def x = (short)1 def y = (float)1 return x + y")); + assertEquals(2F, exec("def x = (char)1 def y = (float)1 return x + y")); + assertEquals(2F, exec("def x = (int)1 def y = (float)1 return x + y")); + assertEquals(2F, exec("def x = (long)1 def y = (float)1 return x + y")); + assertEquals(2F, exec("def x = (float)1 def y = (float)1 return x + y")); + assertEquals(2D, exec("def x = (double)1 def y = (float)1 return x + y")); + + assertEquals(2D, exec("def x = (byte)1 def y = (double)1 return x + y")); + assertEquals(2D, exec("def x = (short)1 def y = (double)1 return x + y")); + assertEquals(2D, exec("def x = (char)1 def y = (double)1 return x + y")); + assertEquals(2D, exec("def x = (int)1 def y = (double)1 return x + y")); + assertEquals(2D, exec("def x = (long)1 def y = (double)1 return x + y")); + assertEquals(2D, exec("def x = (float)1 def y = (double)1 return x + y")); + assertEquals(2D, exec("def x = (double)1 def y = (double)1 return x + y")); + + assertEquals(2, exec("def x = (Byte)1 def y = (byte)1 return x + y")); + assertEquals(2, exec("def x = (Short)1 def y = (short)1 return x + y")); + assertEquals(2, exec("def x = (Character)1 def y = (char)1 return x + y")); + assertEquals(2, exec("def x = (Integer)1 def y = (int)1 return x + y")); + assertEquals(2L, exec("def x = (Long)1 def y = (long)1 return x + y")); + assertEquals(2F, exec("def x = (Float)1 def y = (float)1 return x + y")); + assertEquals(2D, exec("def x = (Double)1 def y = (double)1 return x + y")); + } + + public void testSub() { + assertEquals(0, exec("def x = (byte)1 def y = (byte)1 return x - y")); + assertEquals(0, exec("def x = (short)1 def y = 
(byte)1 return x - y")); + assertEquals(0, exec("def x = (char)1 def y = (byte)1 return x - y")); + assertEquals(0, exec("def x = (int)1 def y = (byte)1 return x - y")); + assertEquals(0L, exec("def x = (long)1 def y = (byte)1 return x - y")); + assertEquals(0F, exec("def x = (float)1 def y = (byte)1 return x - y")); + assertEquals(0D, exec("def x = (double)1 def y = (byte)1 return x - y")); + + assertEquals(0, exec("def x = (byte)1 def y = (short)1 return x - y")); + assertEquals(0, exec("def x = (short)1 def y = (short)1 return x - y")); + assertEquals(0, exec("def x = (char)1 def y = (short)1 return x - y")); + assertEquals(0, exec("def x = (int)1 def y = (short)1 return x - y")); + assertEquals(0L, exec("def x = (long)1 def y = (short)1 return x - y")); + assertEquals(0F, exec("def x = (float)1 def y = (short)1 return x - y")); + assertEquals(0D, exec("def x = (double)1 def y = (short)1 return x - y")); + + assertEquals(0, exec("def x = (byte)1 def y = (char)1 return x - y")); + assertEquals(0, exec("def x = (short)1 def y = (char)1 return x - y")); + assertEquals(0, exec("def x = (char)1 def y = (char)1 return x - y")); + assertEquals(0, exec("def x = (int)1 def y = (char)1 return x - y")); + assertEquals(0L, exec("def x = (long)1 def y = (char)1 return x - y")); + assertEquals(0F, exec("def x = (float)1 def y = (char)1 return x - y")); + assertEquals(0D, exec("def x = (double)1 def y = (char)1 return x - y")); + + assertEquals(0, exec("def x = (byte)1 def y = (int)1 return x - y")); + assertEquals(0, exec("def x = (short)1 def y = (int)1 return x - y")); + assertEquals(0, exec("def x = (char)1 def y = (int)1 return x - y")); + assertEquals(0, exec("def x = (int)1 def y = (int)1 return x - y")); + assertEquals(0L, exec("def x = (long)1 def y = (int)1 return x - y")); + assertEquals(0F, exec("def x = (float)1 def y = (int)1 return x - y")); + assertEquals(0D, exec("def x = (double)1 def y = (int)1 return x - y")); + + assertEquals(0L, exec("def x = (byte)1 def 
y = (long)1 return x - y")); + assertEquals(0L, exec("def x = (short)1 def y = (long)1 return x - y")); + assertEquals(0L, exec("def x = (char)1 def y = (long)1 return x - y")); + assertEquals(0L, exec("def x = (int)1 def y = (long)1 return x - y")); + assertEquals(0L, exec("def x = (long)1 def y = (long)1 return x - y")); + assertEquals(0F, exec("def x = (float)1 def y = (long)1 return x - y")); + assertEquals(0D, exec("def x = (double)1 def y = (long)1 return x - y")); + + assertEquals(0F, exec("def x = (byte)1 def y = (float)1 return x - y")); + assertEquals(0F, exec("def x = (short)1 def y = (float)1 return x - y")); + assertEquals(0F, exec("def x = (char)1 def y = (float)1 return x - y")); + assertEquals(0F, exec("def x = (int)1 def y = (float)1 return x - y")); + assertEquals(0F, exec("def x = (long)1 def y = (float)1 return x - y")); + assertEquals(0F, exec("def x = (float)1 def y = (float)1 return x - y")); + assertEquals(0D, exec("def x = (double)1 def y = (float)1 return x - y")); + + assertEquals(0D, exec("def x = (byte)1 def y = (double)1 return x - y")); + assertEquals(0D, exec("def x = (short)1 def y = (double)1 return x - y")); + assertEquals(0D, exec("def x = (char)1 def y = (double)1 return x - y")); + assertEquals(0D, exec("def x = (int)1 def y = (double)1 return x - y")); + assertEquals(0D, exec("def x = (long)1 def y = (double)1 return x - y")); + assertEquals(0D, exec("def x = (float)1 def y = (double)1 return x - y")); + assertEquals(0D, exec("def x = (double)1 def y = (double)1 return x - y")); + + assertEquals(0, exec("def x = (Byte)1 def y = (byte)1 return x - y")); + assertEquals(0, exec("def x = (Short)1 def y = (short)1 return x - y")); + assertEquals(0, exec("def x = (Character)1 def y = (char)1 return x - y")); + assertEquals(0, exec("def x = (Integer)1 def y = (int)1 return x - y")); + assertEquals(0L, exec("def x = (Long)1 def y = (long)1 return x - y")); + assertEquals(0F, exec("def x = (Float)1 def y = (float)1 return x - y")); + 
assertEquals(0D, exec("def x = (Double)1 def y = (double)1 return x - y")); + } + + public void testLsh() { + assertEquals(2, exec("def x = (byte)1 def y = (byte)1 return x << y")); + assertEquals(2, exec("def x = (short)1 def y = (byte)1 return x << y")); + assertEquals(2, exec("def x = (char)1 def y = (byte)1 return x << y")); + assertEquals(2, exec("def x = (int)1 def y = (byte)1 return x << y")); + assertEquals(2L, exec("def x = (long)1 def y = (byte)1 return x << y")); + assertEquals(2L, exec("def x = (float)1 def y = (byte)1 return x << y")); + assertEquals(2L, exec("def x = (double)1 def y = (byte)1 return x << y")); + + assertEquals(2, exec("def x = (byte)1 def y = (short)1 return x << y")); + assertEquals(2, exec("def x = (short)1 def y = (short)1 return x << y")); + assertEquals(2, exec("def x = (char)1 def y = (short)1 return x << y")); + assertEquals(2, exec("def x = (int)1 def y = (short)1 return x << y")); + assertEquals(2L, exec("def x = (long)1 def y = (short)1 return x << y")); + assertEquals(2L, exec("def x = (float)1 def y = (short)1 return x << y")); + assertEquals(2L, exec("def x = (double)1 def y = (short)1 return x << y")); + + assertEquals(2, exec("def x = (byte)1 def y = (char)1 return x << y")); + assertEquals(2, exec("def x = (short)1 def y = (char)1 return x << y")); + assertEquals(2, exec("def x = (char)1 def y = (char)1 return x << y")); + assertEquals(2, exec("def x = (int)1 def y = (char)1 return x << y")); + assertEquals(2L, exec("def x = (long)1 def y = (char)1 return x << y")); + assertEquals(2L, exec("def x = (float)1 def y = (char)1 return x << y")); + assertEquals(2L, exec("def x = (double)1 def y = (char)1 return x << y")); + + assertEquals(2, exec("def x = (byte)1 def y = (int)1 return x << y")); + assertEquals(2, exec("def x = (short)1 def y = (int)1 return x << y")); + assertEquals(2, exec("def x = (char)1 def y = (int)1 return x << y")); + assertEquals(2, exec("def x = (int)1 def y = (int)1 return x << y")); + 
assertEquals(2L, exec("def x = (long)1 def y = (int)1 return x << y")); + assertEquals(2L, exec("def x = (float)1 def y = (int)1 return x << y")); + assertEquals(2L, exec("def x = (double)1 def y = (int)1 return x << y")); + + assertEquals(2L, exec("def x = (byte)1 def y = (long)1 return x << y")); + assertEquals(2L, exec("def x = (short)1 def y = (long)1 return x << y")); + assertEquals(2L, exec("def x = (char)1 def y = (long)1 return x << y")); + assertEquals(2L, exec("def x = (int)1 def y = (long)1 return x << y")); + assertEquals(2L, exec("def x = (long)1 def y = (long)1 return x << y")); + assertEquals(2L, exec("def x = (float)1 def y = (long)1 return x << y")); + assertEquals(2L, exec("def x = (double)1 def y = (long)1 return x << y")); + + assertEquals(2L, exec("def x = (byte)1 def y = (float)1 return x << y")); + assertEquals(2L, exec("def x = (short)1 def y = (float)1 return x << y")); + assertEquals(2L, exec("def x = (char)1 def y = (float)1 return x << y")); + assertEquals(2L, exec("def x = (int)1 def y = (float)1 return x << y")); + assertEquals(2L, exec("def x = (long)1 def y = (float)1 return x << y")); + assertEquals(2L, exec("def x = (float)1 def y = (float)1 return x << y")); + assertEquals(2L, exec("def x = (double)1 def y = (float)1 return x << y")); + + assertEquals(2L, exec("def x = (byte)1 def y = (double)1 return x << y")); + assertEquals(2L, exec("def x = (short)1 def y = (double)1 return x << y")); + assertEquals(2L, exec("def x = (char)1 def y = (double)1 return x << y")); + assertEquals(2L, exec("def x = (int)1 def y = (double)1 return x << y")); + assertEquals(2L, exec("def x = (long)1 def y = (double)1 return x << y")); + assertEquals(2L, exec("def x = (float)1 def y = (double)1 return x << y")); + assertEquals(2L, exec("def x = (double)1 def y = (double)1 return x << y")); + + assertEquals(2, exec("def x = (Byte)1 def y = (byte)1 return x << y")); + assertEquals(2, exec("def x = (Short)1 def y = (short)1 return x << y")); + 
assertEquals(2, exec("def x = (Character)1 def y = (char)1 return x << y")); + assertEquals(2, exec("def x = (Integer)1 def y = (int)1 return x << y")); + assertEquals(2L, exec("def x = (Long)1 def y = (long)1 return x << y")); + assertEquals(2L, exec("def x = (Float)1 def y = (float)1 return x << y")); + assertEquals(2L, exec("def x = (Double)1 def y = (double)1 return x << y")); + } + + public void testRsh() { + assertEquals(2, exec("def x = (byte)4 def y = (byte)1 return x >> y")); + assertEquals(2, exec("def x = (short)4 def y = (byte)1 return x >> y")); + assertEquals(2, exec("def x = (char)4 def y = (byte)1 return x >> y")); + assertEquals(2, exec("def x = (int)4 def y = (byte)1 return x >> y")); + assertEquals(2L, exec("def x = (long)4 def y = (byte)1 return x >> y")); + assertEquals(2L, exec("def x = (float)4 def y = (byte)1 return x >> y")); + assertEquals(2L, exec("def x = (double)4 def y = (byte)1 return x >> y")); + + assertEquals(2, exec("def x = (byte)4 def y = (short)1 return x >> y")); + assertEquals(2, exec("def x = (short)4 def y = (short)1 return x >> y")); + assertEquals(2, exec("def x = (char)4 def y = (short)1 return x >> y")); + assertEquals(2, exec("def x = (int)4 def y = (short)1 return x >> y")); + assertEquals(2L, exec("def x = (long)4 def y = (short)1 return x >> y")); + assertEquals(2L, exec("def x = (float)4 def y = (short)1 return x >> y")); + assertEquals(2L, exec("def x = (double)4 def y = (short)1 return x >> y")); + + assertEquals(2, exec("def x = (byte)4 def y = (char)1 return x >> y")); + assertEquals(2, exec("def x = (short)4 def y = (char)1 return x >> y")); + assertEquals(2, exec("def x = (char)4 def y = (char)1 return x >> y")); + assertEquals(2, exec("def x = (int)4 def y = (char)1 return x >> y")); + assertEquals(2L, exec("def x = (long)4 def y = (char)1 return x >> y")); + assertEquals(2L, exec("def x = (float)4 def y = (char)1 return x >> y")); + assertEquals(2L, exec("def x = (double)4 def y = (char)1 return x >> y")); 
+ + assertEquals(2, exec("def x = (byte)4 def y = (int)1 return x >> y")); + assertEquals(2, exec("def x = (short)4 def y = (int)1 return x >> y")); + assertEquals(2, exec("def x = (char)4 def y = (int)1 return x >> y")); + assertEquals(2, exec("def x = (int)4 def y = (int)1 return x >> y")); + assertEquals(2L, exec("def x = (long)4 def y = (int)1 return x >> y")); + assertEquals(2L, exec("def x = (float)4 def y = (int)1 return x >> y")); + assertEquals(2L, exec("def x = (double)4 def y = (int)1 return x >> y")); + + assertEquals(2L, exec("def x = (byte)4 def y = (long)1 return x >> y")); + assertEquals(2L, exec("def x = (short)4 def y = (long)1 return x >> y")); + assertEquals(2L, exec("def x = (char)4 def y = (long)1 return x >> y")); + assertEquals(2L, exec("def x = (int)4 def y = (long)1 return x >> y")); + assertEquals(2L, exec("def x = (long)4 def y = (long)1 return x >> y")); + assertEquals(2L, exec("def x = (float)4 def y = (long)1 return x >> y")); + assertEquals(2L, exec("def x = (double)4 def y = (long)1 return x >> y")); + + assertEquals(2L, exec("def x = (byte)4 def y = (float)1 return x >> y")); + assertEquals(2L, exec("def x = (short)4 def y = (float)1 return x >> y")); + assertEquals(2L, exec("def x = (char)4 def y = (float)1 return x >> y")); + assertEquals(2L, exec("def x = (int)4 def y = (float)1 return x >> y")); + assertEquals(2L, exec("def x = (long)4 def y = (float)1 return x >> y")); + assertEquals(2L, exec("def x = (float)4 def y = (float)1 return x >> y")); + assertEquals(2L, exec("def x = (double)4 def y = (float)1 return x >> y")); + + assertEquals(2L, exec("def x = (byte)4 def y = (double)1 return x >> y")); + assertEquals(2L, exec("def x = (short)4 def y = (double)1 return x >> y")); + assertEquals(2L, exec("def x = (char)4 def y = (double)1 return x >> y")); + assertEquals(2L, exec("def x = (int)4 def y = (double)1 return x >> y")); + assertEquals(2L, exec("def x = (long)4 def y = (double)1 return x >> y")); + assertEquals(2L, 
exec("def x = (float)4 def y = (double)1 return x >> y")); + assertEquals(2L, exec("def x = (double)4 def y = (double)1 return x >> y")); + + assertEquals(2, exec("def x = (Byte)4 def y = (byte)1 return x >> y")); + assertEquals(2, exec("def x = (Short)4 def y = (short)1 return x >> y")); + assertEquals(2, exec("def x = (Character)4 def y = (char)1 return x >> y")); + assertEquals(2, exec("def x = (Integer)4 def y = (int)1 return x >> y")); + assertEquals(2L, exec("def x = (Long)4 def y = (long)1 return x >> y")); + assertEquals(2L, exec("def x = (Float)4 def y = (float)1 return x >> y")); + assertEquals(2L, exec("def x = (Double)4 def y = (double)1 return x >> y")); + } + + public void testUsh() { + assertEquals(2, exec("def x = (byte)4 def y = (byte)1 return x >>> y")); + assertEquals(2, exec("def x = (short)4 def y = (byte)1 return x >>> y")); + assertEquals(2, exec("def x = (char)4 def y = (byte)1 return x >>> y")); + assertEquals(2, exec("def x = (int)4 def y = (byte)1 return x >>> y")); + assertEquals(2L, exec("def x = (long)4 def y = (byte)1 return x >>> y")); + assertEquals(2L, exec("def x = (float)4 def y = (byte)1 return x >>> y")); + assertEquals(2L, exec("def x = (double)4 def y = (byte)1 return x >>> y")); + + assertEquals(2, exec("def x = (byte)4 def y = (short)1 return x >>> y")); + assertEquals(2, exec("def x = (short)4 def y = (short)1 return x >>> y")); + assertEquals(2, exec("def x = (char)4 def y = (short)1 return x >>> y")); + assertEquals(2, exec("def x = (int)4 def y = (short)1 return x >>> y")); + assertEquals(2L, exec("def x = (long)4 def y = (short)1 return x >>> y")); + assertEquals(2L, exec("def x = (float)4 def y = (short)1 return x >>> y")); + assertEquals(2L, exec("def x = (double)4 def y = (short)1 return x >>> y")); + + assertEquals(2, exec("def x = (byte)4 def y = (char)1 return x >>> y")); + assertEquals(2, exec("def x = (short)4 def y = (char)1 return x >>> y")); + assertEquals(2, exec("def x = (char)4 def y = (char)1 return x 
>>> y")); + assertEquals(2, exec("def x = (int)4 def y = (char)1 return x >>> y")); + assertEquals(2L, exec("def x = (long)4 def y = (char)1 return x >>> y")); + assertEquals(2L, exec("def x = (float)4 def y = (char)1 return x >>> y")); + assertEquals(2L, exec("def x = (double)4 def y = (char)1 return x >>> y")); + + assertEquals(2, exec("def x = (byte)4 def y = (int)1 return x >>> y")); + assertEquals(2, exec("def x = (short)4 def y = (int)1 return x >>> y")); + assertEquals(2, exec("def x = (char)4 def y = (int)1 return x >>> y")); + assertEquals(2, exec("def x = (int)4 def y = (int)1 return x >>> y")); + assertEquals(2L, exec("def x = (long)4 def y = (int)1 return x >>> y")); + assertEquals(2L, exec("def x = (float)4 def y = (int)1 return x >>> y")); + assertEquals(2L, exec("def x = (double)4 def y = (int)1 return x >>> y")); + + assertEquals(2L, exec("def x = (byte)4 def y = (long)1 return x >>> y")); + assertEquals(2L, exec("def x = (short)4 def y = (long)1 return x >>> y")); + assertEquals(2L, exec("def x = (char)4 def y = (long)1 return x >>> y")); + assertEquals(2L, exec("def x = (int)4 def y = (long)1 return x >>> y")); + assertEquals(2L, exec("def x = (long)4 def y = (long)1 return x >>> y")); + assertEquals(2L, exec("def x = (float)4 def y = (long)1 return x >>> y")); + assertEquals(2L, exec("def x = (double)4 def y = (long)1 return x >>> y")); + + assertEquals(2L, exec("def x = (byte)4 def y = (float)1 return x >>> y")); + assertEquals(2L, exec("def x = (short)4 def y = (float)1 return x >>> y")); + assertEquals(2L, exec("def x = (char)4 def y = (float)1 return x >>> y")); + assertEquals(2L, exec("def x = (int)4 def y = (float)1 return x >>> y")); + assertEquals(2L, exec("def x = (long)4 def y = (float)1 return x >>> y")); + assertEquals(2L, exec("def x = (float)4 def y = (float)1 return x >>> y")); + assertEquals(2L, exec("def x = (double)4 def y = (float)1 return x >>> y")); + + assertEquals(2L, exec("def x = (byte)4 def y = (double)1 return x >>> 
y")); + assertEquals(2L, exec("def x = (short)4 def y = (double)1 return x >>> y")); + assertEquals(2L, exec("def x = (char)4 def y = (double)1 return x >>> y")); + assertEquals(2L, exec("def x = (int)4 def y = (double)1 return x >>> y")); + assertEquals(2L, exec("def x = (long)4 def y = (double)1 return x >>> y")); + assertEquals(2L, exec("def x = (float)4 def y = (double)1 return x >>> y")); + assertEquals(2L, exec("def x = (double)4 def y = (double)1 return x >>> y")); + + assertEquals(2, exec("def x = (Byte)4 def y = (byte)1 return x >>> y")); + assertEquals(2, exec("def x = (Short)4 def y = (short)1 return x >>> y")); + assertEquals(2, exec("def x = (Character)4 def y = (char)1 return x >>> y")); + assertEquals(2, exec("def x = (Integer)4 def y = (int)1 return x >>> y")); + assertEquals(2L, exec("def x = (Long)4 def y = (long)1 return x >>> y")); + assertEquals(2L, exec("def x = (Float)4 def y = (float)1 return x >>> y")); + assertEquals(2L, exec("def x = (Double)4 def y = (double)1 return x >>> y")); + } + + public void testAnd() { + assertEquals(0, exec("def x = (byte)4 def y = (byte)1 return x & y")); + assertEquals(0, exec("def x = (short)4 def y = (byte)1 return x & y")); + assertEquals(0, exec("def x = (char)4 def y = (byte)1 return x & y")); + assertEquals(0, exec("def x = (int)4 def y = (byte)1 return x & y")); + assertEquals(0L, exec("def x = (long)4 def y = (byte)1 return x & y")); + assertEquals(0L, exec("def x = (float)4 def y = (byte)1 return x & y")); + assertEquals(0L, exec("def x = (double)4 def y = (byte)1 return x & y")); + + assertEquals(0, exec("def x = (byte)4 def y = (short)1 return x & y")); + assertEquals(0, exec("def x = (short)4 def y = (short)1 return x & y")); + assertEquals(0, exec("def x = (char)4 def y = (short)1 return x & y")); + assertEquals(0, exec("def x = (int)4 def y = (short)1 return x & y")); + assertEquals(0L, exec("def x = (long)4 def y = (short)1 return x & y")); + assertEquals(0L, exec("def x = (float)4 def y = 
(short)1 return x & y")); + assertEquals(0L, exec("def x = (double)4 def y = (short)1 return x & y")); + + assertEquals(0, exec("def x = (byte)4 def y = (char)1 return x & y")); + assertEquals(0, exec("def x = (short)4 def y = (char)1 return x & y")); + assertEquals(0, exec("def x = (char)4 def y = (char)1 return x & y")); + assertEquals(0, exec("def x = (int)4 def y = (char)1 return x & y")); + assertEquals(0L, exec("def x = (long)4 def y = (char)1 return x & y")); + assertEquals(0L, exec("def x = (float)4 def y = (char)1 return x & y")); + assertEquals(0L, exec("def x = (double)4 def y = (char)1 return x & y")); + + assertEquals(0, exec("def x = (byte)4 def y = (int)1 return x & y")); + assertEquals(0, exec("def x = (short)4 def y = (int)1 return x & y")); + assertEquals(0, exec("def x = (char)4 def y = (int)1 return x & y")); + assertEquals(0, exec("def x = (int)4 def y = (int)1 return x & y")); + assertEquals(0L, exec("def x = (long)4 def y = (int)1 return x & y")); + assertEquals(0L, exec("def x = (float)4 def y = (int)1 return x & y")); + assertEquals(0L, exec("def x = (double)4 def y = (int)1 return x & y")); + + assertEquals(0L, exec("def x = (byte)4 def y = (long)1 return x & y")); + assertEquals(0L, exec("def x = (short)4 def y = (long)1 return x & y")); + assertEquals(0L, exec("def x = (char)4 def y = (long)1 return x & y")); + assertEquals(0L, exec("def x = (int)4 def y = (long)1 return x & y")); + assertEquals(0L, exec("def x = (long)4 def y = (long)1 return x & y")); + assertEquals(0L, exec("def x = (float)4 def y = (long)1 return x & y")); + assertEquals(0L, exec("def x = (double)4 def y = (long)1 return x & y")); + + assertEquals(0L, exec("def x = (byte)4 def y = (float)1 return x & y")); + assertEquals(0L, exec("def x = (short)4 def y = (float)1 return x & y")); + assertEquals(0L, exec("def x = (char)4 def y = (float)1 return x & y")); + assertEquals(0L, exec("def x = (int)4 def y = (float)1 return x & y")); + assertEquals(0L, exec("def x = (long)4 
def y = (float)1 return x & y")); + assertEquals(0L, exec("def x = (float)4 def y = (float)1 return x & y")); + assertEquals(0L, exec("def x = (double)4 def y = (float)1 return x & y")); + + assertEquals(0L, exec("def x = (byte)4 def y = (double)1 return x & y")); + assertEquals(0L, exec("def x = (short)4 def y = (double)1 return x & y")); + assertEquals(0L, exec("def x = (char)4 def y = (double)1 return x & y")); + assertEquals(0L, exec("def x = (int)4 def y = (double)1 return x & y")); + assertEquals(0L, exec("def x = (long)4 def y = (double)1 return x & y")); + assertEquals(0L, exec("def x = (float)4 def y = (double)1 return x & y")); + assertEquals(0L, exec("def x = (double)4 def y = (double)1 return x & y")); + + assertEquals(0, exec("def x = (Byte)4 def y = (byte)1 return x & y")); + assertEquals(0, exec("def x = (Short)4 def y = (short)1 return x & y")); + assertEquals(0, exec("def x = (Character)4 def y = (char)1 return x & y")); + assertEquals(0, exec("def x = (Integer)4 def y = (int)1 return x & y")); + assertEquals(0L, exec("def x = (Long)4 def y = (long)1 return x & y")); + assertEquals(0L, exec("def x = (Float)4 def y = (float)1 return x & y")); + assertEquals(0L, exec("def x = (Double)4 def y = (double)1 return x & y")); + } + + public void testXor() { + assertEquals(5, exec("def x = (byte)4 def y = (byte)1 return x ^ y")); + assertEquals(5, exec("def x = (short)4 def y = (byte)1 return x ^ y")); + assertEquals(5, exec("def x = (char)4 def y = (byte)1 return x ^ y")); + assertEquals(5, exec("def x = (int)4 def y = (byte)1 return x ^ y")); + assertEquals(5L, exec("def x = (long)4 def y = (byte)1 return x ^ y")); + assertEquals(5L, exec("def x = (float)4 def y = (byte)1 return x ^ y")); + assertEquals(5L, exec("def x = (double)4 def y = (byte)1 return x ^ y")); + + assertEquals(5, exec("def x = (byte)4 def y = (short)1 return x ^ y")); + assertEquals(5, exec("def x = (short)4 def y = (short)1 return x ^ y")); + assertEquals(5, exec("def x = (char)4 def 
y = (short)1 return x ^ y")); + assertEquals(5, exec("def x = (int)4 def y = (short)1 return x ^ y")); + assertEquals(5L, exec("def x = (long)4 def y = (short)1 return x ^ y")); + assertEquals(5L, exec("def x = (float)4 def y = (short)1 return x ^ y")); + assertEquals(5L, exec("def x = (double)4 def y = (short)1 return x ^ y")); + + assertEquals(5, exec("def x = (byte)4 def y = (char)1 return x ^ y")); + assertEquals(5, exec("def x = (short)4 def y = (char)1 return x ^ y")); + assertEquals(5, exec("def x = (char)4 def y = (char)1 return x ^ y")); + assertEquals(5, exec("def x = (int)4 def y = (char)1 return x ^ y")); + assertEquals(5L, exec("def x = (long)4 def y = (char)1 return x ^ y")); + assertEquals(5L, exec("def x = (float)4 def y = (char)1 return x ^ y")); + assertEquals(5L, exec("def x = (double)4 def y = (char)1 return x ^ y")); + + assertEquals(5, exec("def x = (byte)4 def y = (int)1 return x ^ y")); + assertEquals(5, exec("def x = (short)4 def y = (int)1 return x ^ y")); + assertEquals(5, exec("def x = (char)4 def y = (int)1 return x ^ y")); + assertEquals(5, exec("def x = (int)4 def y = (int)1 return x ^ y")); + assertEquals(5L, exec("def x = (long)4 def y = (int)1 return x ^ y")); + assertEquals(5L, exec("def x = (float)4 def y = (int)1 return x ^ y")); + assertEquals(5L, exec("def x = (double)4 def y = (int)1 return x ^ y")); + + assertEquals(5L, exec("def x = (byte)4 def y = (long)1 return x ^ y")); + assertEquals(5L, exec("def x = (short)4 def y = (long)1 return x ^ y")); + assertEquals(5L, exec("def x = (char)4 def y = (long)1 return x ^ y")); + assertEquals(5L, exec("def x = (int)4 def y = (long)1 return x ^ y")); + assertEquals(5L, exec("def x = (long)4 def y = (long)1 return x ^ y")); + assertEquals(5L, exec("def x = (float)4 def y = (long)1 return x ^ y")); + assertEquals(5L, exec("def x = (double)4 def y = (long)1 return x ^ y")); + + assertEquals(5L, exec("def x = (byte)4 def y = (float)1 return x ^ y")); + assertEquals(5L, exec("def x = 
(short)4 def y = (float)1 return x ^ y")); + assertEquals(5L, exec("def x = (char)4 def y = (float)1 return x ^ y")); + assertEquals(5L, exec("def x = (int)4 def y = (float)1 return x ^ y")); + assertEquals(5L, exec("def x = (long)4 def y = (float)1 return x ^ y")); + assertEquals(5L, exec("def x = (float)4 def y = (float)1 return x ^ y")); + assertEquals(5L, exec("def x = (double)4 def y = (float)1 return x ^ y")); + + assertEquals(5L, exec("def x = (byte)4 def y = (double)1 return x ^ y")); + assertEquals(5L, exec("def x = (short)4 def y = (double)1 return x ^ y")); + assertEquals(5L, exec("def x = (char)4 def y = (double)1 return x ^ y")); + assertEquals(5L, exec("def x = (int)4 def y = (double)1 return x ^ y")); + assertEquals(5L, exec("def x = (long)4 def y = (double)1 return x ^ y")); + assertEquals(5L, exec("def x = (float)4 def y = (double)1 return x ^ y")); + assertEquals(5L, exec("def x = (double)4 def y = (double)1 return x ^ y")); + + assertEquals(5, exec("def x = (Byte)4 def y = (byte)1 return x ^ y")); + assertEquals(5, exec("def x = (Short)4 def y = (short)1 return x ^ y")); + assertEquals(5, exec("def x = (Character)4 def y = (char)1 return x ^ y")); + assertEquals(5, exec("def x = (Integer)4 def y = (int)1 return x ^ y")); + assertEquals(5L, exec("def x = (Long)4 def y = (long)1 return x ^ y")); + assertEquals(5L, exec("def x = (Float)4 def y = (float)1 return x ^ y")); + assertEquals(5L, exec("def x = (Double)4 def y = (double)1 return x ^ y")); + } + + public void testOr() { + assertEquals(5, exec("def x = (byte)4 def y = (byte)1 return x | y")); + assertEquals(5, exec("def x = (short)4 def y = (byte)1 return x | y")); + assertEquals(5, exec("def x = (char)4 def y = (byte)1 return x | y")); + assertEquals(5, exec("def x = (int)4 def y = (byte)1 return x | y")); + assertEquals(5L, exec("def x = (long)4 def y = (byte)1 return x | y")); + assertEquals(5L, exec("def x = (float)4 def y = (byte)1 return x | y")); + assertEquals(5L, exec("def x = 
(double)4 def y = (byte)1 return x | y")); + + assertEquals(5, exec("def x = (byte)4 def y = (short)1 return x | y")); + assertEquals(5, exec("def x = (short)4 def y = (short)1 return x | y")); + assertEquals(5, exec("def x = (char)4 def y = (short)1 return x | y")); + assertEquals(5, exec("def x = (int)4 def y = (short)1 return x | y")); + assertEquals(5L, exec("def x = (long)4 def y = (short)1 return x | y")); + assertEquals(5L, exec("def x = (float)4 def y = (short)1 return x | y")); + assertEquals(5L, exec("def x = (double)4 def y = (short)1 return x | y")); + + assertEquals(5, exec("def x = (byte)4 def y = (char)1 return x | y")); + assertEquals(5, exec("def x = (short)4 def y = (char)1 return x | y")); + assertEquals(5, exec("def x = (char)4 def y = (char)1 return x | y")); + assertEquals(5, exec("def x = (int)4 def y = (char)1 return x | y")); + assertEquals(5L, exec("def x = (long)4 def y = (char)1 return x | y")); + assertEquals(5L, exec("def x = (float)4 def y = (char)1 return x | y")); + assertEquals(5L, exec("def x = (double)4 def y = (char)1 return x | y")); + + assertEquals(5, exec("def x = (byte)4 def y = (int)1 return x | y")); + assertEquals(5, exec("def x = (short)4 def y = (int)1 return x | y")); + assertEquals(5, exec("def x = (char)4 def y = (int)1 return x | y")); + assertEquals(5, exec("def x = (int)4 def y = (int)1 return x | y")); + assertEquals(5L, exec("def x = (long)4 def y = (int)1 return x | y")); + assertEquals(5L, exec("def x = (float)4 def y = (int)1 return x | y")); + assertEquals(5L, exec("def x = (double)4 def y = (int)1 return x | y")); + + assertEquals(5L, exec("def x = (byte)4 def y = (long)1 return x | y")); + assertEquals(5L, exec("def x = (short)4 def y = (long)1 return x | y")); + assertEquals(5L, exec("def x = (char)4 def y = (long)1 return x | y")); + assertEquals(5L, exec("def x = (int)4 def y = (long)1 return x | y")); + assertEquals(5L, exec("def x = (long)4 def y = (long)1 return x | y")); + assertEquals(5L, 
exec("def x = (float)4 def y = (long)1 return x | y")); + assertEquals(5L, exec("def x = (double)4 def y = (long)1 return x | y")); + + assertEquals(5L, exec("def x = (byte)4 def y = (float)1 return x | y")); + assertEquals(5L, exec("def x = (short)4 def y = (float)1 return x | y")); + assertEquals(5L, exec("def x = (char)4 def y = (float)1 return x | y")); + assertEquals(5L, exec("def x = (int)4 def y = (float)1 return x | y")); + assertEquals(5L, exec("def x = (long)4 def y = (float)1 return x | y")); + assertEquals(5L, exec("def x = (float)4 def y = (float)1 return x | y")); + assertEquals(5L, exec("def x = (double)4 def y = (float)1 return x | y")); + + assertEquals(5L, exec("def x = (byte)4 def y = (double)1 return x | y")); + assertEquals(5L, exec("def x = (short)4 def y = (double)1 return x | y")); + assertEquals(5L, exec("def x = (char)4 def y = (double)1 return x | y")); + assertEquals(5L, exec("def x = (int)4 def y = (double)1 return x | y")); + assertEquals(5L, exec("def x = (long)4 def y = (double)1 return x | y")); + assertEquals(5L, exec("def x = (float)4 def y = (double)1 return x | y")); + assertEquals(5L, exec("def x = (double)4 def y = (double)1 return x | y")); + + assertEquals(5, exec("def x = (Byte)4 def y = (byte)1 return x | y")); + assertEquals(5, exec("def x = (Short)4 def y = (short)1 return x | y")); + assertEquals(5, exec("def x = (Character)4 def y = (char)1 return x | y")); + assertEquals(5, exec("def x = (Integer)4 def y = (int)1 return x | y")); + assertEquals(5L, exec("def x = (Long)4 def y = (long)1 return x | y")); + assertEquals(5L, exec("def x = (Float)4 def y = (float)1 return x | y")); + assertEquals(5L, exec("def x = (Double)4 def y = (double)1 return x | y")); + } + + public void testEq() { + assertEquals(true, exec("def x = (byte)7 def y = (int)7 return x == y")); + assertEquals(true, exec("def x = (short)6 def y = (int)6 return x == y")); + assertEquals(true, exec("def x = (char)5 def y = (int)5 return x == y")); + 
assertEquals(true, exec("def x = (int)4 def y = (int)4 return x == y")); + assertEquals(false, exec("def x = (long)5 def y = (int)3 return x == y")); + assertEquals(false, exec("def x = (float)6 def y = (int)2 return x == y")); + assertEquals(false, exec("def x = (double)7 def y = (int)1 return x == y")); + + assertEquals(true, exec("def x = (byte)7 def y = (double)7 return x == y")); + assertEquals(true, exec("def x = (short)6 def y = (double)6 return x == y")); + assertEquals(true, exec("def x = (char)5 def y = (double)5 return x == y")); + assertEquals(true, exec("def x = (int)4 def y = (double)4 return x == y")); + assertEquals(false, exec("def x = (long)5 def y = (double)3 return x == y")); + assertEquals(false, exec("def x = (float)6 def y = (double)2 return x == y")); + assertEquals(false, exec("def x = (double)7 def y = (double)1 return x == y")); + + assertEquals(true, exec("def x = new HashMap() def y = new HashMap() return x == y")); + assertEquals(false, exec("def x = new HashMap() x.put(3, 3) def y = new HashMap() return x == y")); + assertEquals(true, exec("def x = new HashMap() x.put(3, 3) def y = new HashMap() y.put(3, 3) return x == y")); + assertEquals(true, exec("def x = new HashMap() def y = x x.put(3, 3) y.put(3, 3) return x == y")); + } + + public void testEqr() { + assertEquals(false, exec("def x = (byte)7 def y = (int)7 return x === y")); + assertEquals(false, exec("def x = (short)6 def y = (int)6 return x === y")); + assertEquals(false, exec("def x = (char)5 def y = (int)5 return x === y")); + assertEquals(true, exec("def x = (int)4 def y = (int)4 return x === y")); + assertEquals(false, exec("def x = (long)5 def y = (int)3 return x === y")); + assertEquals(false, exec("def x = (float)6 def y = (int)2 return x === y")); + assertEquals(false, exec("def x = (double)7 def y = (int)1 return x === y")); + + assertEquals(false, exec("def x = new HashMap() def y = new HashMap() return x === y")); + assertEquals(false, exec("def x = new HashMap() 
x.put(3, 3) def y = new HashMap() return x === y")); + assertEquals(false, exec("def x = new HashMap() x.put(3, 3) def y = new HashMap() y.put(3, 3) return x === y")); + assertEquals(true, exec("def x = new HashMap() def y = x x.put(3, 3) y.put(3, 3) return x === y")); + } + + public void testNe() { + assertEquals(false, exec("def x = (byte)7 def y = (int)7 return x != y")); + assertEquals(false, exec("def x = (short)6 def y = (int)6 return x != y")); + assertEquals(false, exec("def x = (char)5 def y = (int)5 return x != y")); + assertEquals(false, exec("def x = (int)4 def y = (int)4 return x != y")); + assertEquals(true, exec("def x = (long)5 def y = (int)3 return x != y")); + assertEquals(true, exec("def x = (float)6 def y = (int)2 return x != y")); + assertEquals(true, exec("def x = (double)7 def y = (int)1 return x != y")); + + assertEquals(false, exec("def x = (byte)7 def y = (double)7 return x != y")); + assertEquals(false, exec("def x = (short)6 def y = (double)6 return x != y")); + assertEquals(false, exec("def x = (char)5 def y = (double)5 return x != y")); + assertEquals(false, exec("def x = (int)4 def y = (double)4 return x != y")); + assertEquals(true, exec("def x = (long)5 def y = (double)3 return x != y")); + assertEquals(true, exec("def x = (float)6 def y = (double)2 return x != y")); + assertEquals(true, exec("def x = (double)7 def y = (double)1 return x != y")); + + assertEquals(false, exec("def x = new HashMap() def y = new HashMap() return x != y")); + assertEquals(true, exec("def x = new HashMap() x.put(3, 3) def y = new HashMap() return x != y")); + assertEquals(false, exec("def x = new HashMap() x.put(3, 3) def y = new HashMap() y.put(3, 3) return x != y")); + assertEquals(false, exec("def x = new HashMap() def y = x x.put(3, 3) y.put(3, 3) return x != y")); + } + + public void testNer() { + assertEquals(true, exec("def x = (byte)7 def y = (int)7 return x !== y")); + assertEquals(true, exec("def x = (short)6 def y = (int)6 return x !== y")); + 
assertEquals(true, exec("def x = (char)5 def y = (int)5 return x !== y")); + assertEquals(false, exec("def x = (int)4 def y = (int)4 return x !== y")); + assertEquals(true, exec("def x = (long)5 def y = (int)3 return x !== y")); + assertEquals(true, exec("def x = (float)6 def y = (int)2 return x !== y")); + assertEquals(true, exec("def x = (double)7 def y = (int)1 return x !== y")); + + assertEquals(true, exec("def x = new HashMap() def y = new HashMap() return x !== y")); + assertEquals(true, exec("def x = new HashMap() x.put(3, 3) def y = new HashMap() return x !== y")); + assertEquals(true, exec("def x = new HashMap() x.put(3, 3) def y = new HashMap() y.put(3, 3) return x !== y")); + assertEquals(false, exec("def x = new HashMap() def y = x x.put(3, 3) y.put(3, 3) return x !== y")); + } + + public void testLt() { + assertEquals(true, exec("def x = (byte)1 def y = (int)7 return x < y")); + assertEquals(true, exec("def x = (short)2 def y = (int)6 return x < y")); + assertEquals(true, exec("def x = (char)3 def y = (int)5 return x < y")); + assertEquals(false, exec("def x = (int)4 def y = (int)4 return x < y")); + assertEquals(false, exec("def x = (long)5 def y = (int)3 return x < y")); + assertEquals(false, exec("def x = (float)6 def y = (int)2 return x < y")); + assertEquals(false, exec("def x = (double)7 def y = (int)1 return x < y")); + + assertEquals(true, exec("def x = (byte)1 def y = (double)7 return x < y")); + assertEquals(true, exec("def x = (short)2 def y = (double)6 return x < y")); + assertEquals(true, exec("def x = (char)3 def y = (double)5 return x < y")); + assertEquals(false, exec("def x = (int)4 def y = (double)4 return x < y")); + assertEquals(false, exec("def x = (long)5 def y = (double)3 return x < y")); + assertEquals(false, exec("def x = (float)6 def y = (double)2 return x < y")); + assertEquals(false, exec("def x = (double)7 def y = (double)1 return x < y")); + } + + public void testLte() { + assertEquals(true, exec("def x = (byte)1 def y = 
(int)7 return x <= y")); + assertEquals(true, exec("def x = (short)2 def y = (int)6 return x <= y")); + assertEquals(true, exec("def x = (char)3 def y = (int)5 return x <= y")); + assertEquals(true, exec("def x = (int)4 def y = (int)4 return x <= y")); + assertEquals(false, exec("def x = (long)5 def y = (int)3 return x <= y")); + assertEquals(false, exec("def x = (float)6 def y = (int)2 return x <= y")); + assertEquals(false, exec("def x = (double)7 def y = (int)1 return x <= y")); + + assertEquals(true, exec("def x = (byte)1 def y = (double)7 return x <= y")); + assertEquals(true, exec("def x = (short)2 def y = (double)6 return x <= y")); + assertEquals(true, exec("def x = (char)3 def y = (double)5 return x <= y")); + assertEquals(true, exec("def x = (int)4 def y = (double)4 return x <= y")); + assertEquals(false, exec("def x = (long)5 def y = (double)3 return x <= y")); + assertEquals(false, exec("def x = (float)6 def y = (double)2 return x <= y")); + assertEquals(false, exec("def x = (double)7 def y = (double)1 return x <= y")); + } + + public void testGt() { + assertEquals(false, exec("def x = (byte)1 def y = (int)7 return x > y")); + assertEquals(false, exec("def x = (short)2 def y = (int)6 return x > y")); + assertEquals(false, exec("def x = (char)3 def y = (int)5 return x > y")); + assertEquals(false, exec("def x = (int)4 def y = (int)4 return x > y")); + assertEquals(true, exec("def x = (long)5 def y = (int)3 return x > y")); + assertEquals(true, exec("def x = (float)6 def y = (int)2 return x > y")); + assertEquals(true, exec("def x = (double)7 def y = (int)1 return x > y")); + + assertEquals(false, exec("def x = (byte)1 def y = (double)7 return x > y")); + assertEquals(false, exec("def x = (short)2 def y = (double)6 return x > y")); + assertEquals(false, exec("def x = (char)3 def y = (double)5 return x > y")); + assertEquals(false, exec("def x = (int)4 def y = (double)4 return x > y")); + assertEquals(true, exec("def x = (long)5 def y = (double)3 return x 
> y")); + assertEquals(true, exec("def x = (float)6 def y = (double)2 return x > y")); + assertEquals(true, exec("def x = (double)7 def y = (double)1 return x > y")); + } + + public void testGte() { + assertEquals(false, exec("def x = (byte)1 def y = (int)7 return x >= y")); + assertEquals(false, exec("def x = (short)2 def y = (int)6 return x >= y")); + assertEquals(false, exec("def x = (char)3 def y = (int)5 return x >= y")); + assertEquals(true, exec("def x = (int)4 def y = (int)4 return x >= y")); + assertEquals(true, exec("def x = (long)5 def y = (int)3 return x >= y")); + assertEquals(true, exec("def x = (float)6 def y = (int)2 return x >= y")); + assertEquals(true, exec("def x = (double)7 def y = (int)1 return x >= y")); + + assertEquals(false, exec("def x = (byte)1 def y = (double)7 return x >= y")); + assertEquals(false, exec("def x = (short)2 def y = (double)6 return x >= y")); + assertEquals(false, exec("def x = (char)3 def y = (double)5 return x >= y")); + assertEquals(true, exec("def x = (int)4 def y = (double)4 return x >= y")); + assertEquals(true, exec("def x = (long)5 def y = (double)3 return x >= y")); + assertEquals(true, exec("def x = (float)6 def y = (double)2 return x >= y")); + assertEquals(true, exec("def x = (double)7 def y = (double)1 return x >= y")); + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/DivisionTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/DivisionTests.java new file mode 100644 index 00000000000..24849fae72b --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/DivisionTests.java @@ -0,0 +1,147 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plan.a; + +/** Tests for division operator across all types */ +//TODO: NaN/Inf/overflow/... +public class DivisionTests extends ScriptTestCase { + + // TODO: byte,short,char + + public void testInt() throws Exception { + assertEquals(1/1, exec("int x = 1; int y = 1; return x/y;")); + assertEquals(2/3, exec("int x = 2; int y = 3; return x/y;")); + assertEquals(5/10, exec("int x = 5; int y = 10; return x/y;")); + assertEquals(10/1/2, exec("int x = 10; int y = 1; int z = 2; return x/y/z;")); + assertEquals((10/1)/2, exec("int x = 10; int y = 1; int z = 2; return (x/y)/z;")); + assertEquals(10/(4/2), exec("int x = 10; int y = 4; int z = 2; return x/(y/z);")); + assertEquals(10/1, exec("int x = 10; int y = 1; return x/y;")); + assertEquals(0/1, exec("int x = 0; int y = 1; return x/y;")); + } + + public void testIntConst() throws Exception { + assertEquals(1/1, exec("return 1/1;")); + assertEquals(2/3, exec("return 2/3;")); + assertEquals(5/10, exec("return 5/10;")); + assertEquals(10/1/2, exec("return 10/1/2;")); + assertEquals((10/1)/2, exec("return (10/1)/2;")); + assertEquals(10/(4/2), exec("return 10/(4/2);")); + assertEquals(10/1, exec("return 10/1;")); + assertEquals(0/1, exec("return 0/1;")); + } + + public void testLong() throws Exception { + assertEquals(1L/1L, exec("long x = 1; long y = 1; return x/y;")); + assertEquals(2L/3L, exec("long x 
= 2; long y = 3; return x/y;")); + assertEquals(5L/10L, exec("long x = 5; long y = 10; return x/y;")); + assertEquals(10L/1L/2L, exec("long x = 10; long y = 1; long z = 2; return x/y/z;")); + assertEquals((10L/1L)/2L, exec("long x = 10; long y = 1; long z = 2; return (x/y)/z;")); + assertEquals(10L/(4L/2L), exec("long x = 10; long y = 4; long z = 2; return x/(y/z);")); + assertEquals(10L/1L, exec("long x = 10; long y = 1; return x/y;")); + assertEquals(0L/1L, exec("long x = 0; long y = 1; return x/y;")); + } + + public void testLongConst() throws Exception { + assertEquals(1L/1L, exec("return 1L/1L;")); + assertEquals(2L/3L, exec("return 2L/3L;")); + assertEquals(5L/10L, exec("return 5L/10L;")); + assertEquals(10L/1L/2L, exec("return 10L/1L/2L;")); + assertEquals((10L/1L)/2L, exec("return (10L/1L)/2L;")); + assertEquals(10L/(4L/2L), exec("return 10L/(4L/2L);")); + assertEquals(10L/1L, exec("return 10L/1L;")); + assertEquals(0L/1L, exec("return 0L/1L;")); + } + + public void testFloat() throws Exception { + assertEquals(1F/1F, exec("float x = 1; float y = 1; return x/y;")); + assertEquals(2F/3F, exec("float x = 2; float y = 3; return x/y;")); + assertEquals(5F/10F, exec("float x = 5; float y = 10; return x/y;")); + assertEquals(10F/1F/2F, exec("float x = 10; float y = 1; float z = 2; return x/y/z;")); + assertEquals((10F/1F)/2F, exec("float x = 10; float y = 1; float z = 2; return (x/y)/z;")); + assertEquals(10F/(4F/2F), exec("float x = 10; float y = 4; float z = 2; return x/(y/z);")); + assertEquals(10F/1F, exec("float x = 10; float y = 1; return x/y;")); + assertEquals(0F/1F, exec("float x = 0; float y = 1; return x/y;")); + } + + public void testFloatConst() throws Exception { + assertEquals(1F/1F, exec("return 1F/1F;")); + assertEquals(2F/3F, exec("return 2F/3F;")); + assertEquals(5F/10F, exec("return 5F/10F;")); + assertEquals(10F/1F/2F, exec("return 10F/1F/2F;")); + assertEquals((10F/1F)/2F, exec("return (10F/1F)/2F;")); + assertEquals(10F/(4F/2F), 
exec("return 10F/(4F/2F);")); + assertEquals(10F/1F, exec("return 10F/1F;")); + assertEquals(0F/1F, exec("return 0F/1F;")); + } + + public void testDouble() throws Exception { + assertEquals(1.0/1.0, exec("double x = 1; double y = 1; return x/y;")); + assertEquals(2.0/3.0, exec("double x = 2; double y = 3; return x/y;")); + assertEquals(5.0/10.0, exec("double x = 5; double y = 10; return x/y;")); + assertEquals(10.0/1.0/2.0, exec("double x = 10; double y = 1; double z = 2; return x/y/z;")); + assertEquals((10.0/1.0)/2.0, exec("double x = 10; double y = 1; double z = 2; return (x/y)/z;")); + assertEquals(10.0/(4.0/2.0), exec("double x = 10; double y = 4; double z = 2; return x/(y/z);")); + assertEquals(10.0/1.0, exec("double x = 10; double y = 1; return x/y;")); + assertEquals(0.0/1.0, exec("double x = 0; double y = 1; return x/y;")); + } + + public void testDoubleConst() throws Exception { + assertEquals(1.0/1.0, exec("return 1.0/1.0;")); + assertEquals(2.0/3.0, exec("return 2.0/3.0;")); + assertEquals(5.0/10.0, exec("return 5.0/10.0;")); + assertEquals(10.0/1.0/2.0, exec("return 10.0/1.0/2.0;")); + assertEquals((10.0/1.0)/2.0, exec("return (10.0/1.0)/2.0;")); + assertEquals(10.0/(4.0/2.0), exec("return 10.0/(4.0/2.0);")); + assertEquals(10.0/1.0, exec("return 10.0/1.0;")); + assertEquals(0.0/1.0, exec("return 0.0/1.0;")); + } + + public void testDivideByZero() throws Exception { + try { + exec("int x = 1; int y = 0; return x / y;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) { + // divide by zero + } + + try { + exec("long x = 1L; long y = 0L; return x / y;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) { + // divide by zero + } + } + + public void testDivideByZeroConst() throws Exception { + try { + exec("return 1/0;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) { + // divide by zero + } + + try { + exec("return 1L/0L;"); + fail("should have hit exception"); + 
} catch (ArithmeticException expected) { + // divide by zero + } + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/EqualsTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/EqualsTests.java new file mode 100644 index 00000000000..db83755aeff --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/EqualsTests.java @@ -0,0 +1,184 @@ +package org.elasticsearch.plan.a; + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +// TODO: Figure out a way to test autobox caching properly from methods such as Integer.valueOf(int); +public class EqualsTests extends ScriptTestCase { + public void testTypesEquals() { + assertEquals(true, exec("return false === false;")); + assertEquals(true, exec("boolean x = false; boolean y = false; return x === y;")); + assertEquals(false, exec("return (byte)3 === (byte)4;")); + assertEquals(true, exec("byte x = 3; byte y = 3; return x === y;")); + assertEquals(false, exec("return (char)3 === (char)4;")); + assertEquals(true, exec("char x = 3; char y = 3; return x === y;")); + assertEquals(false, exec("return (short)3 === (short)4;")); + assertEquals(true, exec("short x = 3; short y = 3; return x === y;")); + assertEquals(false, exec("return (int)3 === (int)4;")); + assertEquals(true, exec("int x = 3; int y = 3; return x === y;")); + assertEquals(false, exec("return (long)3 === (long)4;")); + assertEquals(true, exec("long x = 3; long y = 3; return x === y;")); + assertEquals(false, exec("return (float)3 === (float)4;")); + assertEquals(true, exec("float x = 3; float y = 3; return x === y;")); + assertEquals(false, exec("return (double)3 === (double)4;")); + assertEquals(true, exec("double x = 3; double y = 3; return x === y;")); + + assertEquals(true, exec("return false == false;")); + assertEquals(true, exec("boolean x = false; boolean y = false; return x == y;")); + assertEquals(false, exec("return (byte)3 == (byte)4;")); + assertEquals(true, exec("byte x = 3; byte y = 3; return x == y;")); + assertEquals(false, exec("return (char)3 == (char)4;")); + assertEquals(true, exec("char x = 3; char y = 3; return x == y;")); + assertEquals(false, exec("return (short)3 == (short)4;")); + assertEquals(true, exec("short x = 3; short y = 3; return x == y;")); + assertEquals(false, exec("return (int)3 == (int)4;")); + assertEquals(true, exec("int x = 3; int y = 3; return x == y;")); + assertEquals(false, exec("return (long)3 == (long)4;")); + assertEquals(true, 
exec("long x = 3; long y = 3; return x == y;")); + assertEquals(false, exec("return (float)3 == (float)4;")); + assertEquals(true, exec("float x = 3; float y = 3; return x == y;")); + assertEquals(false, exec("return (double)3 == (double)4;")); + assertEquals(true, exec("double x = 3; double y = 3; return x == y;")); + } + + public void testTypesNotEquals() { + assertEquals(false, exec("return true !== true;")); + assertEquals(false, exec("boolean x = false; boolean y = false; return x !== y;")); + assertEquals(true, exec("return (byte)3 !== (byte)4;")); + assertEquals(false, exec("byte x = 3; byte y = 3; return x !== y;")); + assertEquals(true, exec("return (char)3 !== (char)4;")); + assertEquals(false, exec("char x = 3; char y = 3; return x !== y;")); + assertEquals(true, exec("return (short)3 !== (short)4;")); + assertEquals(false, exec("short x = 3; short y = 3; return x !== y;")); + assertEquals(true, exec("return (int)3 !== (int)4;")); + assertEquals(false, exec("int x = 3; int y = 3; return x !== y;")); + assertEquals(true, exec("return (long)3 !== (long)4;")); + assertEquals(false, exec("long x = 3; long y = 3; return x !== y;")); + assertEquals(true, exec("return (float)3 !== (float)4;")); + assertEquals(false, exec("float x = 3; float y = 3; return x !== y;")); + assertEquals(true, exec("return (double)3 !== (double)4;")); + assertEquals(false, exec("double x = 3; double y = 3; return x !== y;")); + + assertEquals(false, exec("return true != true;")); + assertEquals(false, exec("boolean x = false; boolean y = false; return x != y;")); + assertEquals(true, exec("return (byte)3 != (byte)4;")); + assertEquals(false, exec("byte x = 3; byte y = 3; return x != y;")); + assertEquals(true, exec("return (char)3 != (char)4;")); + assertEquals(false, exec("char x = 3; char y = 3; return x != y;")); + assertEquals(true, exec("return (short)3 != (short)4;")); + assertEquals(false, exec("short x = 3; short y = 3; return x != y;")); + assertEquals(true, exec("return 
(int)3 != (int)4;")); + assertEquals(false, exec("int x = 3; int y = 3; return x != y;")); + assertEquals(true, exec("return (long)3 != (long)4;")); + assertEquals(false, exec("long x = 3; long y = 3; return x != y;")); + assertEquals(true, exec("return (float)3 != (float)4;")); + assertEquals(false, exec("float x = 3; float y = 3; return x != y;")); + assertEquals(true, exec("return (double)3 != (double)4;")); + assertEquals(false, exec("double x = 3; double y = 3; return x != y;")); + } + + public void testEquals() { + assertEquals(true, exec("return new Long(3) == new Long(3);")); + assertEquals(false, exec("return new Long(3) === new Long(3);")); + assertEquals(true, exec("Integer x = new Integer(3); Object y = x; return x == y;")); + assertEquals(true, exec("Integer x = new Integer(3); Object y = x; return x === y;")); + assertEquals(true, exec("Integer x = new Integer(3); Object y = new Integer(3); return x == y;")); + assertEquals(false, exec("Integer x = new Integer(3); Object y = new Integer(3); return x === y;")); + assertEquals(true, exec("Integer x = new Integer(3); int y = 3; return x == y;")); + assertEquals(true, exec("Integer x = new Integer(3); short y = 3; return x == y;")); + assertEquals(true, exec("Integer x = new Integer(3); Short y = (short)3; return x == y;")); + assertEquals(false, exec("Integer x = new Integer(3); int y = 3; return x === y;")); + assertEquals(false, exec("Integer x = new Integer(3); double y = 3; return x === y;")); + assertEquals(true, exec("int[] x = new int[1]; Object y = x; return x == y;")); + assertEquals(true, exec("int[] x = new int[1]; Object y = x; return x === y;")); + assertEquals(false, exec("int[] x = new int[1]; Object y = new int[1]; return x == y;")); + assertEquals(false, exec("int[] x = new int[1]; Object y = new int[1]; return x === y;")); + assertEquals(false, exec("Map x = new HashMap(); List y = new ArrayList(); return x == y;")); + assertEquals(false, exec("Map x = new HashMap(); List y = new 
ArrayList(); return x === y;")); + } + + public void testNotEquals() { + assertEquals(false, exec("return new Long(3) != new Long(3);")); + assertEquals(true, exec("return new Long(3) !== new Long(3);")); + assertEquals(false, exec("Integer x = new Integer(3); Object y = x; return x != y;")); + assertEquals(false, exec("Integer x = new Integer(3); Object y = x; return x !== y;")); + assertEquals(false, exec("Integer x = new Integer(3); Object y = new Integer(3); return x != y;")); + assertEquals(true, exec("Integer x = new Integer(3); Object y = new Integer(3); return x !== y;")); + assertEquals(true, exec("Integer x = new Integer(3); int y = 3; return x !== y;")); + assertEquals(true, exec("Integer x = new Integer(3); double y = 3; return x !== y;")); + assertEquals(false, exec("int[] x = new int[1]; Object y = x; return x != y;")); + assertEquals(false, exec("int[] x = new int[1]; Object y = x; return x !== y;")); + assertEquals(true, exec("int[] x = new int[1]; Object y = new int[1]; return x != y;")); + assertEquals(true, exec("int[] x = new int[1]; Object y = new int[1]; return x !== y;")); + assertEquals(true, exec("Map x = new HashMap(); List y = new ArrayList(); return x != y;")); + assertEquals(true, exec("Map x = new HashMap(); List y = new ArrayList(); return x !== y;")); + } + + public void testBranchEquals() { + assertEquals(0, exec("Character a = 'a'; Character b = 'b'; if (a == b) return 1; else return 0;")); + assertEquals(1, exec("Character a = 'a'; Character b = 'a'; if (a == b) return 1; else return 0;")); + assertEquals(0, exec("Integer a = new Integer(1); Integer b = 1; if (a === b) return 1; else return 0;")); + assertEquals(0, exec("Character a = 'a'; Character b = new Character('a'); if (a === b) return 1; else return 0;")); + assertEquals(1, exec("Character a = 'a'; Object b = a; if (a === b) return 1; else return 0;")); + assertEquals(1, exec("Integer a = 1; Number b = a; Number c = a; if (c === b) return 1; else return 0;")); + 
assertEquals(0, exec("Integer a = 1; Character b = 'a'; if (a === (Object)b) return 1; else return 0;")); + } + + public void testBranchNotEquals() { + assertEquals(1, exec("Character a = 'a'; Character b = 'b'; if (a != b) return 1; else return 0;")); + assertEquals(0, exec("Character a = 'a'; Character b = 'a'; if (a != b) return 1; else return 0;")); + assertEquals(1, exec("Integer a = new Integer(1); Integer b = 1; if (a !== b) return 1; else return 0;")); + assertEquals(1, exec("Character a = 'a'; Character b = new Character('a'); if (a !== b) return 1; else return 0;")); + assertEquals(0, exec("Character a = 'a'; Object b = a; if (a !== b) return 1; else return 0;")); + assertEquals(0, exec("Integer a = 1; Number b = a; Number c = a; if (c !== b) return 1; else return 0;")); + assertEquals(1, exec("Integer a = 1; Character b = 'a'; if (a !== (Object)b) return 1; else return 0;")); + } + + public void testRightHandNull() { + assertEquals(false, exec("Character a = 'a'; return a == null;")); + assertEquals(false, exec("Character a = 'a'; return a === null;")); + assertEquals(true, exec("Character a = 'a'; return a != null;")); + assertEquals(true, exec("Character a = 'a'; return a !== null;")); + assertEquals(true, exec("Character a = null; return a == null;")); + assertEquals(false, exec("Character a = null; return a != null;")); + assertEquals(false, exec("Character a = 'a'; Character b = null; return a == b;")); + assertEquals(true, exec("Character a = null; Character b = null; return a === b;")); + assertEquals(true, exec("Character a = 'a'; Character b = null; return a != b;")); + assertEquals(false, exec("Character a = null; Character b = null; return a !== b;")); + assertEquals(false, exec("Integer x = null; double y = 2.0; return x == y;")); + assertEquals(true, exec("Integer x = null; Short y = null; return x == y;")); + } + + public void testLeftHandNull() { + assertEquals(false, exec("Character a = 'a'; return null == a;")); + assertEquals(false, 
exec("Character a = 'a'; return null === a;")); + assertEquals(true, exec("Character a = 'a'; return null != a;")); + assertEquals(true, exec("Character a = 'a'; return null !== a;")); + assertEquals(true, exec("Character a = null; return null == a;")); + assertEquals(false, exec("Character a = null; return null != a;")); + assertEquals(false, exec("Character a = null; Character b = 'a'; return a == b;")); + assertEquals(true, exec("Character a = null; Character b = null; return a == b;")); + assertEquals(true, exec("Character a = null; Character b = null; return b === a;")); + assertEquals(true, exec("Character a = null; Character b = 'a'; return a != b;")); + assertEquals(false, exec("Character a = null; Character b = null; return b != a;")); + assertEquals(false, exec("Character a = null; Character b = null; return b !== a;")); + assertEquals(false, exec("Integer x = null; double y = 2.0; return y == x;")); + assertEquals(true, exec("Integer x = null; Short y = null; return y == x;")); + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FieldTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FieldTests.java new file mode 100644 index 00000000000..7504ed9d4bc --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FieldTests.java @@ -0,0 +1,108 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plan.a; + +import org.junit.Before; + +public class FieldTests extends ScriptTestCase { + public static class FieldClass { + public boolean z = false; + public byte b = 0; + public short s = 1; + public char c = 'c'; + public int i = 2; + public int si = -1; + public long j = 3l; + public float f = 4.0f; + public double d = 5.0; + public String t = "s"; + public Object l = new Object(); + + public float test(float a, float b) { + return Math.min(a, b); + } + + public int getSi() { + return si; + } + + public void setSi(final int si) { + this.si = si; + } + } + + public static class FieldDefinition extends Definition { + FieldDefinition() { + super(); + + addStruct("FieldClass", FieldClass.class); + addConstructor("FieldClass", "new", new Type[] {}, null); + addField("FieldClass", "z", null, false, booleanType, null); + addField("FieldClass", "b", null, false, byteType, null); + addField("FieldClass", "s", null, false, shortType, null); + addField("FieldClass", "c", null, false, charType, null); + addField("FieldClass", "i", null, false, intType, null); + addField("FieldClass", "j", null, false, longType, null); + addField("FieldClass", "f", null, false, floatType, null); + addField("FieldClass", "d", null, false, doubleType, null); + addField("FieldClass", "t", null, false, stringType, null); + addField("FieldClass", "l", null, false, objectType, null); + addClass("FieldClass"); + addMethod("FieldClass", "getSi", null, false, intType, new Type[] {}, null, null); + addMethod("FieldClass", "setSi", null, false, 
voidType, new Type[] {intType}, null, null); + addMethod("FieldClass", "test", null, false, floatType, new Type[] {floatType, floatType}, null, null); + } + } + + @Before + public void setDefinition() { + scriptEngine.setDefinition(new FieldDefinition()); + } + + public void testIntField() { + assertEquals("s5t42", exec("def fc = new FieldClass() return fc.t += 2 + fc.j + \"t\" + 4 + (3 - 1)")); + assertEquals(2.0f, exec("def fc = new FieldClass(); def l = new Double(3) Byte b = new Byte((byte)2) return fc.test(l, b)")); + assertEquals(4, exec("def fc = new FieldClass() fc.i = 4 return fc.i")); + assertEquals(5, exec("FieldClass fc0 = new FieldClass() FieldClass fc1 = new FieldClass() fc0.i = 7 - fc0.i fc1.i = fc0.i return fc1.i")); + assertEquals(8, exec("def fc0 = new FieldClass() def fc1 = new FieldClass() fc0.i += fc1.i fc0.i += fc0.i return fc0.i")); + } + + public void testExplicitShortcut() { + assertEquals(5, exec("FieldClass fc = new FieldClass() fc.setSi(5) return fc.si")); + assertEquals(-1, exec("FieldClass fc = new FieldClass() def x = fc.getSi() x")); + assertEquals(5, exec("FieldClass fc = new FieldClass() fc.si = 5 return fc.si")); + assertEquals(0, exec("FieldClass fc = new FieldClass() fc.si++ return fc.si")); + assertEquals(-1, exec("FieldClass fc = new FieldClass() def x = fc.si++ return x")); + assertEquals(0, exec("FieldClass fc = new FieldClass() def x = ++fc.si return x")); + assertEquals(-2, exec("FieldClass fc = new FieldClass() fc.si *= 2 fc.si")); + assertEquals("-1test", exec("FieldClass fc = new FieldClass() fc.si + \"test\"")); + } + + public void testImplicitShortcut() { + assertEquals(5, exec("def fc = new FieldClass() fc.setSi(5) return fc.si")); + assertEquals(-1, exec("def fc = new FieldClass() def x = fc.getSi() x")); + assertEquals(5, exec("def fc = new FieldClass() fc.si = 5 return fc.si")); + assertEquals(0, exec("def fc = new FieldClass() fc.si++ return fc.si")); + assertEquals(-1, exec("def fc = new FieldClass() def x = 
fc.si++ return x")); + assertEquals(0, exec("def fc = new FieldClass() def x = ++fc.si return x")); + assertEquals(-2, exec("def fc = new FieldClass() fc.si *= 2 fc.si")); + assertEquals("-1test", exec("def fc = new FieldClass() fc.si + \"test\"")); + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowDisabledTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowDisabledTests.java new file mode 100644 index 00000000000..94beac0c58c --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowDisabledTests.java @@ -0,0 +1,294 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plan.a; + +import org.elasticsearch.common.settings.Settings; + +/** Tests floating point overflow with numeric overflow disabled */ +public class FloatOverflowDisabledTests extends ScriptTestCase { + + @Override + protected Settings getSettings() { + Settings.Builder builder = Settings.builder(); + builder.put(super.getSettings()); + builder.put(PlanAScriptEngineService.NUMERIC_OVERFLOW, false); + return builder.build(); + } + + public void testAssignmentAdditionOverflow() { + // float + try { + exec("float x = 3.4028234663852886E38f; x += 3.4028234663852886E38f; return x;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("float x = -3.4028234663852886E38f; x += -3.4028234663852886E38f; return x;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + + // double + try { + exec("double x = 1.7976931348623157E308; x += 1.7976931348623157E308; return x;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("double x = -1.7976931348623157E308; x += -1.7976931348623157E308; return x;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testAssignmentSubtractionOverflow() { + // float + try { + exec("float x = 3.4028234663852886E38f; x -= -3.4028234663852886E38f; return x;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("float x = -3.4028234663852886E38f; x -= 3.4028234663852886E38f; return x;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + + // double + try { + exec("double x = 1.7976931348623157E308; x -= -1.7976931348623157E308; return x;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("double x = -1.7976931348623157E308; x -= 1.7976931348623157E308; return x;"); + fail("didn't hit expected 
exception"); + } catch (ArithmeticException expected) {} + } + + public void testAssignmentMultiplicationOverflow() { + // float + try { + exec("float x = 3.4028234663852886E38f; x *= 3.4028234663852886E38f; return x;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("float x = 3.4028234663852886E38f; x *= -3.4028234663852886E38f; return x;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + + // double + try { + exec("double x = 1.7976931348623157E308; x *= 1.7976931348623157E308; return x;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("double x = 1.7976931348623157E308; x *= -1.7976931348623157E308; return x;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testAssignmentDivisionOverflow() { + // float + try { + exec("float x = 3.4028234663852886E38f; x /= 1.401298464324817E-45f; return x;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("float x = 3.4028234663852886E38f; x /= -1.401298464324817E-45f; return x;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("float x = 1.0f; x /= 0.0f; return x;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + + // double + try { + exec("double x = 1.7976931348623157E308; x /= 4.9E-324; return x;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("double x = 1.7976931348623157E308; x /= -4.9E-324; return x;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("double x = 1.0f; x /= 0.0; return x;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testAddition() throws Exception { + try { + exec("float x = 
3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x + y;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("double x = 1.7976931348623157E308; double y = 1.7976931348623157E308; return x + y;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testAdditionConst() throws Exception { + try { + exec("return 3.4028234663852886E38f + 3.4028234663852886E38f;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("return 1.7976931348623157E308 + 1.7976931348623157E308;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testSubtraction() throws Exception { + try { + exec("float x = -3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x - y;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("double x = -1.7976931348623157E308; double y = 1.7976931348623157E308; return x - y;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testSubtractionConst() throws Exception { + try { + exec("return -3.4028234663852886E38f - 3.4028234663852886E38f;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("return -1.7976931348623157E308 - 1.7976931348623157E308;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testMultiplication() throws Exception { + try { + exec("float x = 3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x * y;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("double x = 1.7976931348623157E308; double y = 1.7976931348623157E308; return x * y;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + } + + public void 
testMultiplicationConst() throws Exception { + try { + exec("return 3.4028234663852886E38f * 3.4028234663852886E38f;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("return 1.7976931348623157E308 * 1.7976931348623157E308;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testDivision() throws Exception { + try { + exec("float x = 3.4028234663852886E38f; float y = 1.401298464324817E-45f; return x / y;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("float x = 1.0f; float y = 0.0f; return x / y;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("double x = 1.7976931348623157E308; double y = 4.9E-324; return x / y;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("double x = 1.0; double y = 0.0; return x / y;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testDivisionConst() throws Exception { + try { + exec("return 3.4028234663852886E38f / 1.401298464324817E-45f;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("return 1.0f / 0.0f;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("return 1.7976931348623157E308 / 4.9E-324;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("return 1.0 / 0.0;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testDivisionNaN() throws Exception { + // float division, constant division, and assignment + try { + exec("float x = 0f; float y = 0f; return x / y;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("return 0f / 0f;"); + fail("didn't 
hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("float x = 0f; x /= 0f; return x;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + + // double division, constant division, and assignment + try { + exec("double x = 0.0; double y = 0.0; return x / y;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("return 0.0 / 0.0;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("double x = 0.0; x /= 0.0; return x;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testRemainderNaN() throws Exception { + // float division, constant division, and assignment + try { + exec("float x = 1f; float y = 0f; return x % y;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("return 1f % 0f;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("float x = 1f; x %= 0f; return x;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + + // double division, constant division, and assignment + try { + exec("double x = 1.0; double y = 0.0; return x % y;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("return 1.0 % 0.0;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + try { + exec("double x = 1.0; x %= 0.0; return x;"); + fail("didn't hit expected exception"); + } catch (ArithmeticException expected) {} + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java new file mode 100644 index 00000000000..ff1c315628f --- /dev/null +++ 
b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java @@ -0,0 +1,144 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plan.a; + +import org.elasticsearch.common.settings.Settings; + +/** Tests floating point overflow with numeric overflow enabled */ +public class FloatOverflowEnabledTests extends ScriptTestCase { + + @Override + protected Settings getSettings() { + Settings.Builder builder = Settings.builder(); + builder.put(super.getSettings()); + builder.put(PlanAScriptEngineService.NUMERIC_OVERFLOW, true); + return builder.build(); + } + + public void testAssignmentAdditionOverflow() { + // float + assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x += 3.4028234663852886E38f; return x;")); + assertEquals(Float.NEGATIVE_INFINITY, exec("float x = -3.4028234663852886E38f; x += -3.4028234663852886E38f; return x;")); + + // double + assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; x += 1.7976931348623157E308; return x;")); + assertEquals(Double.NEGATIVE_INFINITY, exec("double x = -1.7976931348623157E308; x += -1.7976931348623157E308; return x;")); + } + + public void 
testAssignmentSubtractionOverflow() { + // float + assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x -= -3.4028234663852886E38f; return x;")); + assertEquals(Float.NEGATIVE_INFINITY, exec("float x = -3.4028234663852886E38f; x -= 3.4028234663852886E38f; return x;")); + + // double + assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; x -= -1.7976931348623157E308; return x;")); + assertEquals(Double.NEGATIVE_INFINITY, exec("double x = -1.7976931348623157E308; x -= 1.7976931348623157E308; return x;")); + } + + public void testAssignmentMultiplicationOverflow() { + // float + assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x *= 3.4028234663852886E38f; return x;")); + assertEquals(Float.NEGATIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x *= -3.4028234663852886E38f; return x;")); + + // double + assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; x *= 1.7976931348623157E308; return x;")); + assertEquals(Double.NEGATIVE_INFINITY, exec("double x = 1.7976931348623157E308; x *= -1.7976931348623157E308; return x;")); + } + + public void testAssignmentDivisionOverflow() { + // float + assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x /= 1.401298464324817E-45f; return x;")); + assertEquals(Float.NEGATIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x /= -1.401298464324817E-45f; return x;")); + assertEquals(Float.POSITIVE_INFINITY, exec("float x = 1.0f; x /= 0.0f; return x;")); + + // double + assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; x /= 4.9E-324; return x;")); + assertEquals(Double.NEGATIVE_INFINITY, exec("double x = 1.7976931348623157E308; x /= -4.9E-324; return x;")); + assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.0f; x /= 0.0; return x;")); + } + + public void testAddition() throws Exception { + assertEquals(Float.POSITIVE_INFINITY, exec("float x = 
3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x + y;")); + assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; double y = 1.7976931348623157E308; return x + y;")); + } + + public void testAdditionConst() throws Exception { + assertEquals(Float.POSITIVE_INFINITY, exec("return 3.4028234663852886E38f + 3.4028234663852886E38f;")); + assertEquals(Double.POSITIVE_INFINITY, exec("return 1.7976931348623157E308 + 1.7976931348623157E308;")); + } + + public void testSubtraction() throws Exception { + assertEquals(Float.NEGATIVE_INFINITY, exec("float x = -3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x - y;")); + assertEquals(Double.NEGATIVE_INFINITY, exec("double x = -1.7976931348623157E308; double y = 1.7976931348623157E308; return x - y;")); + } + + public void testSubtractionConst() throws Exception { + assertEquals(Float.NEGATIVE_INFINITY, exec("return -3.4028234663852886E38f - 3.4028234663852886E38f;")); + assertEquals(Double.NEGATIVE_INFINITY, exec("return -1.7976931348623157E308 - 1.7976931348623157E308;")); + } + + public void testMultiplication() throws Exception { + assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x * y;")); + assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; double y = 1.7976931348623157E308; return x * y;")); + } + + public void testMultiplicationConst() throws Exception { + assertEquals(Float.POSITIVE_INFINITY, exec("return 3.4028234663852886E38f * 3.4028234663852886E38f;")); + assertEquals(Double.POSITIVE_INFINITY, exec("return 1.7976931348623157E308 * 1.7976931348623157E308;")); + } + + public void testDivision() throws Exception { + assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; float y = 1.401298464324817E-45f; return x / y;")); + assertEquals(Float.POSITIVE_INFINITY, exec("float x = 1.0f; float y = 0.0f; return x / y;")); + 
assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; double y = 4.9E-324; return x / y;")); + assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.0; double y = 0.0; return x / y;")); + } + + public void testDivisionConst() throws Exception { + assertEquals(Float.POSITIVE_INFINITY, exec("return 3.4028234663852886E38f / 1.401298464324817E-45f;")); + assertEquals(Float.POSITIVE_INFINITY, exec("return 1.0f / 0.0f;")); + assertEquals(Double.POSITIVE_INFINITY, exec("return 1.7976931348623157E308 / 4.9E-324;")); + assertEquals(Double.POSITIVE_INFINITY, exec("return 1.0 / 0.0;")); + } + + public void testDivisionNaN() throws Exception { + // float division, constant division, and assignment + assertTrue(Float.isNaN((Float) exec("float x = 0f; float y = 0f; return x / y;"))); + assertTrue(Float.isNaN((Float) exec("return 0f / 0f;"))); + assertTrue(Float.isNaN((Float) exec("float x = 0f; x /= 0f; return x;"))); + + // double division, constant division, and assignment + assertTrue(Double.isNaN((Double) exec("double x = 0.0; double y = 0.0; return x / y;"))); + assertTrue(Double.isNaN((Double) exec("return 0.0 / 0.0;"))); + assertTrue(Double.isNaN((Double) exec("double x = 0.0; x /= 0.0; return x;"))); + } + + public void testRemainderNaN() throws Exception { + // float division, constant division, and assignment + assertTrue(Float.isNaN((Float) exec("float x = 1f; float y = 0f; return x % y;"))); + assertTrue(Float.isNaN((Float) exec("return 1f % 0f;"))); + assertTrue(Float.isNaN((Float) exec("float x = 1f; x %= 0f; return x;"))); + + // double division, constant division, and assignment + assertTrue(Double.isNaN((Double) exec("double x = 1.0; double y = 0.0; return x % y;"))); + assertTrue(Double.isNaN((Double) exec("return 1.0 % 0.0;"))); + assertTrue(Double.isNaN((Double) exec("double x = 1.0; x %= 0.0; return x;"))); + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IncrementTests.java 
b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IncrementTests.java new file mode 100644 index 00000000000..ec4ffd0ec1d --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IncrementTests.java @@ -0,0 +1,79 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plan.a; + +/** Tests for increment/decrement operators across all data types */ +public class IncrementTests extends ScriptTestCase { + + /** incrementing byte values */ + public void testIncrementByte() { + assertEquals((byte)0, exec("byte x = (byte)0; return x++;")); + assertEquals((byte)0, exec("byte x = (byte)0; return x--;")); + assertEquals((byte)1, exec("byte x = (byte)0; return ++x;")); + assertEquals((byte)-1, exec("byte x = (byte)0; return --x;")); + } + + /** incrementing char values */ + public void testIncrementChar() { + assertEquals((char)0, exec("char x = (char)0; return x++;")); + assertEquals((char)1, exec("char x = (char)1; return x--;")); + assertEquals((char)1, exec("char x = (char)0; return ++x;")); + } + + /** incrementing short values */ + public void testIncrementShort() { + assertEquals((short)0, exec("short x = (short)0; return x++;")); + assertEquals((short)0, exec("short x = (short)0; return x--;")); + assertEquals((short)1, exec("short x = (short)0; return ++x;")); + assertEquals((short)-1, exec("short x = (short)0; return --x;")); + } + + /** incrementing integer values */ + public void testIncrementInt() { + assertEquals(0, exec("int x = 0; return x++;")); + assertEquals(0, exec("int x = 0; return x--;")); + assertEquals(1, exec("int x = 0; return ++x;")); + assertEquals(-1, exec("int x = 0; return --x;")); + } + + /** incrementing long values */ + public void testIncrementLong() { + assertEquals(0L, exec("long x = 0; return x++;")); + assertEquals(0L, exec("long x = 0; return x--;")); + assertEquals(1L, exec("long x = 0; return ++x;")); + assertEquals(-1L, exec("long x = 0; return --x;")); + } + + /** incrementing float values */ + public void testIncrementFloat() { + assertEquals(0F, exec("float x = 0F; return x++;")); + assertEquals(0F, exec("float x = 0F; return x--;")); + assertEquals(1F, exec("float x = 0F; return ++x;")); + assertEquals(-1F, exec("float x = 0F; return --x;")); + } + + /** 
incrementing double values */ + public void testIncrementDouble() { + assertEquals(0D, exec("double x = 0.0; return x++;")); + assertEquals(0D, exec("double x = 0.0; return x--;")); + assertEquals(1D, exec("double x = 0.0; return ++x;")); + assertEquals(-1D, exec("double x = 0.0; return --x;")); + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java new file mode 100644 index 00000000000..279ea0616d9 --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java @@ -0,0 +1,445 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plan.a; + +import org.elasticsearch.common.settings.Settings; + +/** Tests integer overflow with numeric overflow disabled */ +public class IntegerOverflowDisabledTests extends ScriptTestCase { + + @Override + protected Settings getSettings() { + Settings.Builder builder = Settings.builder(); + builder.put(super.getSettings()); + builder.put(PlanAScriptEngineService.NUMERIC_OVERFLOW, false); + return builder.build(); + } + + public void testAssignmentAdditionOverflow() { + // byte + try { + exec("byte x = 0; x += 128; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("byte x = 0; x += -129; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + // short + try { + exec("short x = 0; x += 32768; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("byte x = 0; x += -32769; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + // char + try { + exec("char x = 0; x += 65536; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("char x = 0; x += -65536; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + // int + try { + exec("int x = 1; x += 2147483647; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("int x = -2; x += -2147483647; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + // long + try { + exec("long x = 1; x += 9223372036854775807L; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("long x = -2; x += -9223372036854775807L; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException 
expected) {} + } + + public void testAssignmentSubtractionOverflow() { + // byte + try { + exec("byte x = 0; x -= -128; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("byte x = 0; x -= 129; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + // short + try { + exec("short x = 0; x -= -32768; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("byte x = 0; x -= 32769; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + // char + try { + exec("char x = 0; x -= -65536; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("char x = 0; x -= 65536; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + // int + try { + exec("int x = 1; x -= -2147483647; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("int x = -2; x -= 2147483647; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + // long + try { + exec("long x = 1; x -= -9223372036854775807L; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("long x = -2; x -= 9223372036854775807L; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testAssignmentMultiplicationOverflow() { + // byte + try { + exec("byte x = 2; x *= 128; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("byte x = 2; x *= -128; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + // char + try { + exec("char x = 2; x *= 65536; return x;"); + fail("did not 
get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("char x = 2; x *= -65536; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + // int + try { + exec("int x = 2; x *= 2147483647; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("int x = 2; x *= -2147483647; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + // long + try { + exec("long x = 2; x *= 9223372036854775807L; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("long x = 2; x *= -9223372036854775807L; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testAssignmentDivisionOverflow() { + // byte + try { + exec("byte x = (byte) -128; x /= -1; return x;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + + // short + try { + exec("short x = (short) -32768; x /= -1; return x;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + + // cannot happen for char: unsigned + + // int + try { + exec("int x = -2147483647 - 1; x /= -1; return x;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + + // long + try { + exec("long x = -9223372036854775807L - 1L; x /=-1L; return x;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + } + + public void testIncrementOverFlow() throws Exception { + // byte + try { + exec("byte x = 127; ++x; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("byte x = 127; x++; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("byte x = (byte) -128; --x; return x;"); + fail("did not get expected 
exception"); + } catch (ArithmeticException expected) {} + + try { + exec("byte x = (byte) -128; x--; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + // short + try { + exec("short x = 32767; ++x; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("short x = 32767; x++; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("short x = (short) -32768; --x; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("short x = (short) -32768; x--; return x;"); + } catch (ArithmeticException expected) {} + + // char + try { + exec("char x = 65535; ++x; return x;"); + } catch (ArithmeticException expected) {} + + try { + exec("char x = 65535; x++; return x;"); + } catch (ArithmeticException expected) {} + + try { + exec("char x = (char) 0; --x; return x;"); + } catch (ArithmeticException expected) {} + + try { + exec("char x = (char) 0; x--; return x;"); + } catch (ArithmeticException expected) {} + + // int + try { + exec("int x = 2147483647; ++x; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("int x = 2147483647; x++; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("int x = (int) -2147483648L; --x; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("int x = (int) -2147483648L; x--; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + // long + try { + exec("long x = 9223372036854775807L; ++x; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("long x = 9223372036854775807L; x++; return x;"); + fail("did not get expected 
exception"); + } catch (ArithmeticException expected) {} + + try { + exec("long x = -9223372036854775807L - 1L; --x; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("long x = -9223372036854775807L - 1L; x--; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testAddition() throws Exception { + try { + exec("int x = 2147483647; int y = 2147483647; return x + y;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + + try { + exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x + y;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + } + + public void testAdditionConst() throws Exception { + try { + exec("return 2147483647 + 2147483647;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + + try { + exec("return 9223372036854775807L + 9223372036854775807L;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + } + + + public void testSubtraction() throws Exception { + try { + exec("int x = -10; int y = 2147483647; return x - y;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + + try { + exec("long x = -10L; long y = 9223372036854775807L; return x - y;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + } + + public void testSubtractionConst() throws Exception { + try { + exec("return -10 - 2147483647;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + + try { + exec("return -10L - 9223372036854775807L;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + } + + public void testMultiplication() throws Exception { + try { + exec("int x = 2147483647; int y = 2147483647; return x * y;"); + fail("should have hit exception"); + } catch (ArithmeticException 
expected) {} + + try { + exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x * y;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + } + + public void testMultiplicationConst() throws Exception { + try { + exec("return 2147483647 * 2147483647;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + + try { + exec("return 9223372036854775807L * 9223372036854775807L;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + } + + public void testDivision() throws Exception { + try { + exec("int x = -2147483647 - 1; int y = -1; return x / y;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + + try { + exec("long x = -9223372036854775808L; long y = -1L; return x / y;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + } + + public void testDivisionConst() throws Exception { + try { + exec("return (-2147483648) / -1;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + + try { + exec("return (-9223372036854775808L) / -1L;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) {} + } + + public void testNegationOverflow() throws Exception { + try { + exec("int x = -2147483648; x = -x; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("long x = -9223372036854775808L; x = -x; return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testNegationOverflowConst() throws Exception { + try { + exec("int x = -(-2147483648); return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + exec("long x = -(-9223372036854775808L); return x;"); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + } +} diff --git 
a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java new file mode 100644 index 00000000000..8abd2695915 --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java @@ -0,0 +1,194 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plan.a; + +import org.elasticsearch.common.settings.Settings; + +/** Tests integer overflow with numeric overflow enabled */ +public class IntegerOverflowEnabledTests extends ScriptTestCase { + + @Override + protected Settings getSettings() { + Settings.Builder builder = Settings.builder(); + builder.put(super.getSettings()); + builder.put(PlanAScriptEngineService.NUMERIC_OVERFLOW, true); + return builder.build(); + } + + public void testAssignmentAdditionOverflow() { + // byte + assertEquals((byte)(0 + 128), exec("byte x = 0; x += 128; return x;")); + assertEquals((byte)(0 + -129), exec("byte x = 0; x += -129; return x;")); + + // short + assertEquals((short)(0 + 32768), exec("short x = 0; x += 32768; return x;")); + assertEquals((short)(0 + -32769), exec("short x = 0; x += -32769; return x;")); + + // char + assertEquals((char)(0 + 65536), exec("char x = 0; x += 65536; return x;")); + assertEquals((char)(0 + -65536), exec("char x = 0; x += -65536; return x;")); + + // int + assertEquals(1 + 2147483647, exec("int x = 1; x += 2147483647; return x;")); + assertEquals(-2 + -2147483647, exec("int x = -2; x += -2147483647; return x;")); + + // long + assertEquals(1L + 9223372036854775807L, exec("long x = 1; x += 9223372036854775807L; return x;")); + assertEquals(-2L + -9223372036854775807L, exec("long x = -2; x += -9223372036854775807L; return x;")); + } + + public void testAssignmentSubtractionOverflow() { + // byte + assertEquals((byte)(0 - -128), exec("byte x = 0; x -= -128; return x;")); + assertEquals((byte)(0 - 129), exec("byte x = 0; x -= 129; return x;")); + + // short + assertEquals((short)(0 - -32768), exec("short x = 0; x -= -32768; return x;")); + assertEquals((short)(0 - 32769), exec("short x = 0; x -= 32769; return x;")); + + // char + assertEquals((char)(0 - -65536), exec("char x = 0; x -= -65536; return x;")); + assertEquals((char)(0 - 65536), exec("char x = 0; x -= 65536; return x;")); + + // int + assertEquals(1 - 
-2147483647, exec("int x = 1; x -= -2147483647; return x;")); + assertEquals(-2 - 2147483647, exec("int x = -2; x -= 2147483647; return x;")); + + // long + assertEquals(1L - -9223372036854775807L, exec("long x = 1; x -= -9223372036854775807L; return x;")); + assertEquals(-2L - 9223372036854775807L, exec("long x = -2; x -= 9223372036854775807L; return x;")); + } + + public void testAssignmentMultiplicationOverflow() { + // byte + assertEquals((byte) (2 * 128), exec("byte x = 2; x *= 128; return x;")); + assertEquals((byte) (2 * -128), exec("byte x = 2; x *= -128; return x;")); + + // char + assertEquals((char) (2 * 65536), exec("char x = 2; x *= 65536; return x;")); + assertEquals((char) (2 * -65536), exec("char x = 2; x *= -65536; return x;")); + + // int + assertEquals(2 * 2147483647, exec("int x = 2; x *= 2147483647; return x;")); + assertEquals(2 * -2147483647, exec("int x = 2; x *= -2147483647; return x;")); + + // long + assertEquals(2L * 9223372036854775807L, exec("long x = 2; x *= 9223372036854775807L; return x;")); + assertEquals(2L * -9223372036854775807L, exec("long x = 2; x *= -9223372036854775807L; return x;")); + } + + public void testAssignmentDivisionOverflow() { + // byte + assertEquals((byte) (-128 / -1), exec("byte x = (byte) -128; x /= -1; return x;")); + + // short + assertEquals((short) (-32768 / -1), exec("short x = (short) -32768; x /= -1; return x;")); + + // cannot happen for char: unsigned + + // int + assertEquals((-2147483647 - 1) / -1, exec("int x = -2147483647 - 1; x /= -1; return x;")); + + // long + assertEquals((-9223372036854775807L - 1L) / -1L, exec("long x = -9223372036854775807L - 1L; x /=-1L; return x;")); + } + + public void testIncrementOverFlow() throws Exception { + // byte + assertEquals((byte) 128, exec("byte x = 127; ++x; return x;")); + assertEquals((byte) 128, exec("byte x = 127; x++; return x;")); + assertEquals((byte) -129, exec("byte x = (byte) -128; --x; return x;")); + assertEquals((byte) -129, exec("byte x = 
(byte) -128; x--; return x;")); + + // short + assertEquals((short) 32768, exec("short x = 32767; ++x; return x;")); + assertEquals((short) 32768, exec("short x = 32767; x++; return x;")); + assertEquals((short) -32769, exec("short x = (short) -32768; --x; return x;")); + assertEquals((short) -32769, exec("short x = (short) -32768; x--; return x;")); + + // char + assertEquals((char) 65536, exec("char x = 65535; ++x; return x;")); + assertEquals((char) 65536, exec("char x = 65535; x++; return x;")); + assertEquals((char) -1, exec("char x = (char) 0; --x; return x;")); + assertEquals((char) -1, exec("char x = (char) 0; x--; return x;")); + + // int + assertEquals(2147483647 + 1, exec("int x = 2147483647; ++x; return x;")); + assertEquals(2147483647 + 1, exec("int x = 2147483647; x++; return x;")); + assertEquals(-2147483648 - 1, exec("int x = (int) -2147483648L; --x; return x;")); + assertEquals(-2147483648 - 1, exec("int x = (int) -2147483648L; x--; return x;")); + + // long + assertEquals(9223372036854775807L + 1L, exec("long x = 9223372036854775807L; ++x; return x;")); + assertEquals(9223372036854775807L + 1L, exec("long x = 9223372036854775807L; x++; return x;")); + assertEquals(-9223372036854775807L - 1L - 1L, exec("long x = -9223372036854775807L - 1L; --x; return x;")); + assertEquals(-9223372036854775807L - 1L - 1L, exec("long x = -9223372036854775807L - 1L; x--; return x;")); + } + + public void testAddition() throws Exception { + assertEquals(2147483647 + 2147483647, exec("int x = 2147483647; int y = 2147483647; return x + y;")); + assertEquals(9223372036854775807L + 9223372036854775807L, exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x + y;")); + } + + public void testAdditionConst() throws Exception { + assertEquals(2147483647 + 2147483647, exec("return 2147483647 + 2147483647;")); + assertEquals(9223372036854775807L + 9223372036854775807L, exec("return 9223372036854775807L + 9223372036854775807L;")); + } + + public void 
testSubtraction() throws Exception { + assertEquals(-10 - 2147483647, exec("int x = -10; int y = 2147483647; return x - y;")); + assertEquals(-10L - 9223372036854775807L, exec("long x = -10L; long y = 9223372036854775807L; return x - y;")); + } + + public void testSubtractionConst() throws Exception { + assertEquals(-10 - 2147483647, exec("return -10 - 2147483647;")); + assertEquals(-10L - 9223372036854775807L, exec("return -10L - 9223372036854775807L;")); + } + + public void testMultiplication() throws Exception { + assertEquals(2147483647 * 2147483647, exec("int x = 2147483647; int y = 2147483647; return x * y;")); + assertEquals(9223372036854775807L * 9223372036854775807L, exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x * y;")); + } + + public void testMultiplicationConst() throws Exception { + assertEquals(2147483647 * 2147483647, exec("return 2147483647 * 2147483647;")); + assertEquals(9223372036854775807L * 9223372036854775807L, exec("return 9223372036854775807L * 9223372036854775807L;")); + } + + public void testDivision() throws Exception { + assertEquals((-2147483647 - 1) / -1, exec("int x = -2147483648; int y = -1; return x / y;")); + assertEquals((-9223372036854775807L - 1L) / -1L, exec("long x = -9223372036854775808L; long y = -1L; return x / y;")); + } + + public void testDivisionConst() throws Exception { + assertEquals((-2147483647 - 1) / -1, exec("return (-2147483648) / -1;")); + assertEquals((-9223372036854775807L - 1L) / -1L, exec("return (-9223372036854775808L) / -1L;")); + } + + public void testNegationOverflow() throws Exception { + assertEquals(-(-2147483647 - 1), exec("int x = -2147483648; x = -x; return x;")); + assertEquals(-(-9223372036854775807L - 1L), exec("long x = -9223372036854775808L; x = -x; return x;")); + } + + public void testNegationOverflowConst() throws Exception { + assertEquals(-(-2147483647 - 1), exec("int x = -(-2147483648); return x;")); + assertEquals(-(-9223372036854775807L - 1L), exec("long 
x = -(-9223372036854775808L); return x;")); + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/MultiplicationTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/MultiplicationTests.java new file mode 100644 index 00000000000..c5fde3b6ff1 --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/MultiplicationTests.java @@ -0,0 +1,126 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plan.a; + +/** Tests for multiplication operator across all types */ +//TODO: NaN/Inf/overflow/... 
+public class MultiplicationTests extends ScriptTestCase { + + // TODO: short,byte,char + + public void testInt() throws Exception { + assertEquals(1*1, exec("int x = 1; int y = 1; return x*y;")); + assertEquals(2*3, exec("int x = 2; int y = 3; return x*y;")); + assertEquals(5*10, exec("int x = 5; int y = 10; return x*y;")); + assertEquals(1*1*2, exec("int x = 1; int y = 1; int z = 2; return x*y*z;")); + assertEquals((1*1)*2, exec("int x = 1; int y = 1; int z = 2; return (x*y)*z;")); + assertEquals(1*(1*2), exec("int x = 1; int y = 1; int z = 2; return x*(y*z);")); + assertEquals(10*0, exec("int x = 10; int y = 0; return x*y;")); + assertEquals(0*0, exec("int x = 0; int y = 0; return x*x;")); + } + + public void testIntConst() throws Exception { + assertEquals(1*1, exec("return 1*1;")); + assertEquals(2*3, exec("return 2*3;")); + assertEquals(5*10, exec("return 5*10;")); + assertEquals(1*1*2, exec("return 1*1*2;")); + assertEquals((1*1)*2, exec("return (1*1)*2;")); + assertEquals(1*(1*2), exec("return 1*(1*2);")); + assertEquals(10*0, exec("return 10*0;")); + assertEquals(0*0, exec("return 0*0;")); + } + + public void testByte() throws Exception { + assertEquals((byte)1*(byte)1, exec("byte x = 1; byte y = 1; return x*y;")); + assertEquals((byte)2*(byte)3, exec("byte x = 2; byte y = 3; return x*y;")); + assertEquals((byte)5*(byte)10, exec("byte x = 5; byte y = 10; return x*y;")); + assertEquals((byte)1*(byte)1*(byte)2, exec("byte x = 1; byte y = 1; byte z = 2; return x*y*z;")); + assertEquals(((byte)1*(byte)1)*(byte)2, exec("byte x = 1; byte y = 1; byte z = 2; return (x*y)*z;")); + assertEquals((byte)1*((byte)1*(byte)2), exec("byte x = 1; byte y = 1; byte z = 2; return x*(y*z);")); + assertEquals((byte)10*(byte)0, exec("byte x = 10; byte y = 0; return x*y;")); + assertEquals((byte)0*(byte)0, exec("byte x = 0; byte y = 0; return x*x;")); + } + + public void testLong() throws Exception { + assertEquals(1L*1L, exec("long x = 1; long y = 1; return x*y;")); + 
assertEquals(2L*3L, exec("long x = 2; long y = 3; return x*y;")); + assertEquals(5L*10L, exec("long x = 5; long y = 10; return x*y;")); + assertEquals(1L*1L*2L, exec("long x = 1; long y = 1; int z = 2; return x*y*z;")); + assertEquals((1L*1L)*2L, exec("long x = 1; long y = 1; int z = 2; return (x*y)*z;")); + assertEquals(1L*(1L*2L), exec("long x = 1; long y = 1; int z = 2; return x*(y*z);")); + assertEquals(10L*0L, exec("long x = 10; long y = 0; return x*y;")); + assertEquals(0L*0L, exec("long x = 0; long y = 0; return x*x;")); + } + + public void testLongConst() throws Exception { + assertEquals(1L*1L, exec("return 1L*1L;")); + assertEquals(2L*3L, exec("return 2L*3L;")); + assertEquals(5L*10L, exec("return 5L*10L;")); + assertEquals(1L*1L*2L, exec("return 1L*1L*2L;")); + assertEquals((1L*1L)*2L, exec("return (1L*1L)*2L;")); + assertEquals(1L*(1L*2L), exec("return 1L*(1L*2L);")); + assertEquals(10L*0L, exec("return 10L*0L;")); + assertEquals(0L*0L, exec("return 0L*0L;")); + } + + public void testFloat() throws Exception { + assertEquals(1F*1F, exec("float x = 1; float y = 1; return x*y;")); + assertEquals(2F*3F, exec("float x = 2; float y = 3; return x*y;")); + assertEquals(5F*10F, exec("float x = 5; float y = 10; return x*y;")); + assertEquals(1F*1F*2F, exec("float x = 1; float y = 1; float z = 2; return x*y*z;")); + assertEquals((1F*1F)*2F, exec("float x = 1; float y = 1; float z = 2; return (x*y)*z;")); + assertEquals(1F*(1F*2F), exec("float x = 1; float y = 1; float z = 2; return x*(y*z);")); + assertEquals(10F*0F, exec("float x = 10; float y = 0; return x*y;")); + assertEquals(0F*0F, exec("float x = 0; float y = 0; return x*x;")); + } + + public void testFloatConst() throws Exception { + assertEquals(1F*1F, exec("return 1F*1F;")); + assertEquals(2F*3F, exec("return 2F*3F;")); + assertEquals(5F*10F, exec("return 5F*10F;")); + assertEquals(1F*1F*2F, exec("return 1F*1F*2F;")); + assertEquals((1F*1F)*2F, exec("return (1F*1F)*2F;")); + assertEquals(1F*(1F*2F), 
exec("return 1F*(1F*2F);")); + assertEquals(10F*0F, exec("return 10F*0F;")); + assertEquals(0F*0F, exec("return 0F*0F;")); + } + + public void testDouble() throws Exception { + assertEquals(1D*1D, exec("double x = 1; double y = 1; return x*y;")); + assertEquals(2D*3D, exec("double x = 2; double y = 3; return x*y;")); + assertEquals(5D*10D, exec("double x = 5; double y = 10; return x*y;")); + assertEquals(1D*1D*2D, exec("double x = 1; double y = 1; double z = 2; return x*y*z;")); + assertEquals((1D*1D)*2D, exec("double x = 1; double y = 1; double z = 2; return (x*y)*z;")); + assertEquals(1D*(1D*2D), exec("double x = 1; double y = 1; double z = 2; return x*(y*z);")); + assertEquals(10D*0D, exec("double x = 10; float y = 0; return x*y;")); + assertEquals(0D*0D, exec("double x = 0; float y = 0; return x*x;")); + } + + public void testDoubleConst() throws Exception { + assertEquals(1.0*1.0, exec("return 1.0*1.0;")); + assertEquals(2.0*3.0, exec("return 2.0*3.0;")); + assertEquals(5.0*10.0, exec("return 5.0*10.0;")); + assertEquals(1.0*1.0*2.0, exec("return 1.0*1.0*2.0;")); + assertEquals((1.0*1.0)*2.0, exec("return (1.0*1.0)*2.0;")); + assertEquals(1.0*(1.0*2.0), exec("return 1.0*(1.0*2.0);")); + assertEquals(10.0*0.0, exec("return 10.0*0.0;")); + assertEquals(0.0*0.0, exec("return 0.0*0.0;")); + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/NoSemiColonTest.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/NoSemiColonTest.java new file mode 100644 index 00000000000..ff56ee3f07e --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/NoSemiColonTest.java @@ -0,0 +1,178 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plan.a; + +import java.util.HashMap; +import java.util.Map; + +public class NoSemiColonTest extends ScriptTestCase { + + public void testIfStatement() { + assertEquals(1, exec("int x = 5 if (x == 5) return 1 return 0")); + assertEquals(0, exec("int x = 4 if (x == 5) return 1 else return 0")); + assertEquals(2, exec("int x = 4 if (x == 5) return 1 else if (x == 4) return 2 else return 0")); + assertEquals(1, exec("int x = 4 if (x == 5) return 1 else if (x == 4) return 1 else return 0")); + + assertEquals(3, exec( + "int x = 5\n" + + "if (x == 5) {\n" + + " int y = 2\n" + + " \n" + + " if (y == 2) {\n" + + " x = 3\n" + + " }\n" + + " \n" + + "}\n" + + "\n" + + "return x\n")); + } + + public void testWhileStatement() { + + assertEquals("aaaaaa", exec("String c = \"a\" int x while (x < 5) { c ..= \"a\" ++x } return c")); + + Object value = exec( + " byte[][] b = new byte[5][5] \n" + + " byte x = 0, y \n" + + " \n" + + " while (x < 5) { \n" + + " y = 0 \n" + + " \n" + + " while (y < 5) { \n" + + " b[x][y] = (byte)(x*y) \n" + + " ++y \n" + + " } \n" + + " \n" + + " ++x \n" + + " } \n" + + " \n" + + " return b \n"); + + byte[][] b = (byte[][])value; + + for (byte x = 0; x < 5; ++x) { + for (byte y = 0; y < 5; ++y) { + assertEquals(x*y, b[x][y]); + } + } + } + + public void testDoWhileStatement() { + assertEquals("aaaaaa", exec("String c = \"a\" int x do 
{ c ..= \"a\" ++x } while (x < 5) return c")); + + Object value = exec( + " long[][] l = new long[5][5] \n" + + " long x = 0, y \n" + + " \n" + + " do { \n" + + " y = 0 \n" + + " \n" + + " do { \n" + + " l[(int)x][(int)y] = x*y \n" + + " ++y \n" + + " } while (y < 5) \n" + + " \n" + + " ++x \n" + + " } while (x < 5) \n" + + " \n" + + " return l \n"); + + long[][] l = (long[][])value; + + for (long x = 0; x < 5; ++x) { + for (long y = 0; y < 5; ++y) { + assertEquals(x*y, l[(int)x][(int)y]); + } + } + } + + public void testForStatement() { + assertEquals("aaaaaa", exec("String c = \"a\" for (int x = 0; x < 5; ++x) c ..= \"a\" return c")); + + Object value = exec( + " int[][] i = new int[5][5] \n" + + " for (int x = 0; x < 5; ++x) { \n" + + " for (int y = 0; y < 5; ++y) { \n" + + " i[x][y] = x*y \n" + + " } \n" + + " } \n" + + " \n" + + " return i \n"); + + int[][] i = (int[][])value; + + for (int x = 0; x < 5; ++x) { + for (int y = 0; y < 5; ++y) { + assertEquals(x*y, i[x][y]); + } + } + } + + public void testDeclarationStatement() { + assertEquals((byte)2, exec("byte a = 2 return a")); + assertEquals((short)2, exec("short a = 2 return a")); + assertEquals((char)2, exec("char a = 2 return a")); + assertEquals(2, exec("int a = 2 return a")); + assertEquals(2L, exec("long a = 2 return a")); + assertEquals(2F, exec("float a = 2 return a")); + assertEquals(2.0, exec("double a = 2 return a")); + assertEquals(false, exec("boolean a = false return a")); + assertEquals("string", exec("String a = \"string\" return a")); + assertEquals(HashMap.class, exec("Map a = new HashMap() return a").getClass()); + + assertEquals(byte[].class, exec("byte[] a = new byte[1] return a").getClass()); + assertEquals(short[].class, exec("short[] a = new short[1] return a").getClass()); + assertEquals(char[].class, exec("char[] a = new char[1] return a").getClass()); + assertEquals(int[].class, exec("int[] a = new int[1] return a").getClass()); + assertEquals(long[].class, exec("long[] a = new 
long[1] return a").getClass()); + assertEquals(float[].class, exec("float[] a = new float[1] return a").getClass()); + assertEquals(double[].class, exec("double[] a = new double[1] return a").getClass()); + assertEquals(boolean[].class, exec("boolean[] a = new boolean[1] return a").getClass()); + assertEquals(String[].class, exec("String[] a = new String[1] return a").getClass()); + assertEquals(Map[].class, exec("Map[] a = new Map[1] return a").getClass()); + + assertEquals(byte[][].class, exec("byte[][] a = new byte[1][2] return a").getClass()); + assertEquals(short[][][].class, exec("short[][][] a = new short[1][2][3] return a").getClass()); + assertEquals(char[][][][].class, exec("char[][][][] a = new char[1][2][3][4] return a").getClass()); + assertEquals(int[][][][][].class, exec("int[][][][][] a = new int[1][2][3][4][5] return a").getClass()); + assertEquals(long[][].class, exec("long[][] a = new long[1][2] return a").getClass()); + assertEquals(float[][][].class, exec("float[][][] a = new float[1][2][3] return a").getClass()); + assertEquals(double[][][][].class, exec("double[][][][] a = new double[1][2][3][4] return a").getClass()); + assertEquals(boolean[][][][][].class, exec("boolean[][][][][] a = new boolean[1][2][3][4][5] return a").getClass()); + assertEquals(String[][].class, exec("String[][] a = new String[1][2] return a").getClass()); + assertEquals(Map[][][].class, exec("Map[][][] a = new Map[1][2][3] return a").getClass()); + } + + public void testContinueStatement() { + assertEquals(9, exec("int x = 0, y = 0 while (x < 10) { ++x if (x == 1) continue ++y } return y")); + } + + public void testBreakStatement() { + assertEquals(4, exec("int x = 0, y = 0 while (x < 10) { ++x if (x == 5) break ++y } return y")); + } + + public void testReturnStatement() { + assertEquals(10, exec("return 10")); + assertEquals(5, exec("int x = 5 return x")); + assertEquals(4, exec("int[] x = new int[2] x[1] = 4 return x[1]")); + assertEquals(5, ((short[])exec("short[] 
s = new short[3] s[1] = 5 return s"))[1]); + assertEquals(10, ((Map)exec("Map s = new HashMap< String,Object>() s.put(\"x\", 10) return s")).get("x")); + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/OrTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/OrTests.java new file mode 100644 index 00000000000..f3ba0c88fc1 --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/OrTests.java @@ -0,0 +1,48 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plan.a; + +/** Tests for or operator across all types */ +public class OrTests extends ScriptTestCase { + + public void testInt() throws Exception { + assertEquals(5 | 12, exec("int x = 5; int y = 12; return x | y;")); + assertEquals(5 | -12, exec("int x = 5; int y = -12; return x | y;")); + assertEquals(7 | 15 | 3, exec("int x = 7; int y = 15; int z = 3; return x | y | z;")); + } + + public void testIntConst() throws Exception { + assertEquals(5 | 12, exec("return 5 | 12;")); + assertEquals(5 | -12, exec("return 5 | -12;")); + assertEquals(7 | 15 | 3, exec("return 7 | 15 | 3;")); + } + + public void testLong() throws Exception { + assertEquals(5L | 12L, exec("long x = 5; long y = 12; return x | y;")); + assertEquals(5L | -12L, exec("long x = 5; long y = -12; return x | y;")); + assertEquals(7L | 15L | 3L, exec("long x = 7; long y = 15; long z = 3; return x | y | z;")); + } + + public void testLongConst() throws Exception { + assertEquals(5L | 12L, exec("return 5L | 12L;")); + assertEquals(5L | -12L, exec("return 5L | -12L;")); + assertEquals(7L | 15L | 3L, exec("return 7L | 15L | 3L;")); + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/PlanARestIT.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/PlanARestIT.java new file mode 100644 index 00000000000..c2c19ccb03a --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/PlanARestIT.java @@ -0,0 +1,49 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plan.a; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.RestTestCandidate; +import org.elasticsearch.test.rest.parser.RestTestParseException; + +import java.io.IOException; +import java.util.Collection; + +/** Runs yaml rest tests */ +public class PlanARestIT extends ESRestTestCase { + + @Override + protected Collection> nodePlugins() { + return pluginList(PlanAPlugin.class); + } + + public PlanARestIT(@Name("yaml") RestTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws IOException, RestTestParseException { + return ESRestTestCase.createParameters(0, 1); + } +} + diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/RemainderTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/RemainderTests.java new file mode 100644 index 00000000000..c7b6f7b1e3f --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/RemainderTests.java @@ -0,0 +1,147 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plan.a; + +/** Tests for division operator across all types */ +//TODO: NaN/Inf/overflow/... +public class RemainderTests extends ScriptTestCase { + + // TODO: byte,short,char + + public void testInt() throws Exception { + assertEquals(1%1, exec("int x = 1; int y = 1; return x%y;")); + assertEquals(2%3, exec("int x = 2; int y = 3; return x%y;")); + assertEquals(5%10, exec("int x = 5; int y = 10; return x%y;")); + assertEquals(10%1%2, exec("int x = 10; int y = 1; int z = 2; return x%y%z;")); + assertEquals((10%1)%2, exec("int x = 10; int y = 1; int z = 2; return (x%y)%z;")); + assertEquals(10%(4%3), exec("int x = 10; int y = 4; int z = 3; return x%(y%z);")); + assertEquals(10%1, exec("int x = 10; int y = 1; return x%y;")); + assertEquals(0%1, exec("int x = 0; int y = 1; return x%y;")); + } + + public void testIntConst() throws Exception { + assertEquals(1%1, exec("return 1%1;")); + assertEquals(2%3, exec("return 2%3;")); + assertEquals(5%10, exec("return 5%10;")); + assertEquals(10%1%2, exec("return 10%1%2;")); + assertEquals((10%1)%2, exec("return (10%1)%2;")); + assertEquals(10%(4%3), exec("return 10%(4%3);")); + assertEquals(10%1, exec("return 10%1;")); + assertEquals(0%1, exec("return 0%1;")); + } + + public void testLong() throws Exception { + assertEquals(1L%1L, exec("long x = 1; long y = 1; return x%y;")); + assertEquals(2L%3L, exec("long x 
= 2; long y = 3; return x%y;")); + assertEquals(5L%10L, exec("long x = 5; long y = 10; return x%y;")); + assertEquals(10L%1L%2L, exec("long x = 10; long y = 1; long z = 2; return x%y%z;")); + assertEquals((10L%1L)%2L, exec("long x = 10; long y = 1; long z = 2; return (x%y)%z;")); + assertEquals(10L%(4L%3L), exec("long x = 10; long y = 4; long z = 3; return x%(y%z);")); + assertEquals(10L%1L, exec("long x = 10; long y = 1; return x%y;")); + assertEquals(0L%1L, exec("long x = 0; long y = 1; return x%y;")); + } + + public void testLongConst() throws Exception { + assertEquals(1L%1L, exec("return 1L%1L;")); + assertEquals(2L%3L, exec("return 2L%3L;")); + assertEquals(5L%10L, exec("return 5L%10L;")); + assertEquals(10L%1L%2L, exec("return 10L%1L%2L;")); + assertEquals((10L%1L)%2L, exec("return (10L%1L)%2L;")); + assertEquals(10L%(4L%3L), exec("return 10L%(4L%3L);")); + assertEquals(10L%1L, exec("return 10L%1L;")); + assertEquals(0L%1L, exec("return 0L%1L;")); + } + + public void testFloat() throws Exception { + assertEquals(1F%1F, exec("float x = 1; float y = 1; return x%y;")); + assertEquals(2F%3F, exec("float x = 2; float y = 3; return x%y;")); + assertEquals(5F%10F, exec("float x = 5; float y = 10; return x%y;")); + assertEquals(10F%1F%2F, exec("float x = 10; float y = 1; float z = 2; return x%y%z;")); + assertEquals((10F%1F)%2F, exec("float x = 10; float y = 1; float z = 2; return (x%y)%z;")); + assertEquals(10F%(4F%3F), exec("float x = 10; float y = 4; float z = 3; return x%(y%z);")); + assertEquals(10F%1F, exec("float x = 10; float y = 1; return x%y;")); + assertEquals(0F%1F, exec("float x = 0; float y = 1; return x%y;")); + } + + public void testFloatConst() throws Exception { + assertEquals(1F%1F, exec("return 1F%1F;")); + assertEquals(2F%3F, exec("return 2F%3F;")); + assertEquals(5F%10F, exec("return 5F%10F;")); + assertEquals(10F%1F%2F, exec("return 10F%1F%2F;")); + assertEquals((10F%1F)%2F, exec("return (10F%1F)%2F;")); + assertEquals(10F%(4F%3F), 
exec("return 10F%(4F%3F);")); + assertEquals(10F%1F, exec("return 10F%1F;")); + assertEquals(0F%1F, exec("return 0F%1F;")); + } + + public void testDouble() throws Exception { + assertEquals(1.0%1.0, exec("double x = 1; double y = 1; return x%y;")); + assertEquals(2.0%3.0, exec("double x = 2; double y = 3; return x%y;")); + assertEquals(5.0%10.0, exec("double x = 5; double y = 10; return x%y;")); + assertEquals(10.0%1.0%2.0, exec("double x = 10; double y = 1; double z = 2; return x%y%z;")); + assertEquals((10.0%1.0)%2.0, exec("double x = 10; double y = 1; double z = 2; return (x%y)%z;")); + assertEquals(10.0%(4.0%3.0), exec("double x = 10; double y = 4; double z = 3; return x%(y%z);")); + assertEquals(10.0%1.0, exec("double x = 10; double y = 1; return x%y;")); + assertEquals(0.0%1.0, exec("double x = 0; double y = 1; return x%y;")); + } + + public void testDoubleConst() throws Exception { + assertEquals(1.0%1.0, exec("return 1.0%1.0;")); + assertEquals(2.0%3.0, exec("return 2.0%3.0;")); + assertEquals(5.0%10.0, exec("return 5.0%10.0;")); + assertEquals(10.0%1.0%2.0, exec("return 10.0%1.0%2.0;")); + assertEquals((10.0%1.0)%2.0, exec("return (10.0%1.0)%2.0;")); + assertEquals(10.0%(4.0%3.0), exec("return 10.0%(4.0%3.0);")); + assertEquals(10.0%1.0, exec("return 10.0%1.0;")); + assertEquals(0.0%1.0, exec("return 0.0%1.0;")); + } + + public void testDivideByZero() throws Exception { + try { + exec("int x = 1; int y = 0; return x % y;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) { + // divide by zero + } + + try { + exec("long x = 1L; long y = 0L; return x % y;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) { + // divide by zero + } + } + + public void testDivideByZeroConst() throws Exception { + try { + exec("return 1%0;"); + fail("should have hit exception"); + } catch (ArithmeticException expected) { + // divide by zero + } + + try { + exec("return 1L%0L;"); + fail("should have hit exception"); + 
} catch (ArithmeticException expected) { + // divide by zero + } + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java new file mode 100644 index 00000000000..d2bbe02a625 --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java @@ -0,0 +1,109 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plan.a; + +import org.elasticsearch.script.CompiledScript; +import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.ScriptService; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; + +public class ScriptEngineTests extends ScriptTestCase { + + public void testSimpleEquation() { + final Object value = exec("return 1 + 2;"); + assertEquals(3, ((Number)value).intValue()); + } + + public void testMapAccess() { + Map vars = new HashMap<>(); + Map obj2 = new HashMap<>(); + obj2.put("prop2", "value2"); + Map obj1 = new HashMap<>(); + obj1.put("prop1", "value1"); + obj1.put("obj2", obj2); + obj1.put("l", Arrays.asList("2", "1")); + vars.put("obj1", obj1); + + Object value = exec("return input.get(\"obj1\");", vars); + obj1 = (Map)value; + assertEquals("value1", obj1.get("prop1")); + assertEquals("value2", ((Map) obj1.get("obj2")).get("prop2")); + + value = exec("return ((List)((Map)input.get(\"obj1\")).get(\"l\")).get(0);", vars); + assertEquals("2", value); + } + + public void testAccessListInScript() { + Map vars = new HashMap<>(); + Map obj2 = new HashMap<>(); + obj2.put("prop2", "value2"); + Map obj1 = new HashMap<>(); + obj1.put("prop1", "value1"); + obj1.put("obj2", obj2); + vars.put("l", Arrays.asList("1", "2", "3", obj1)); + + assertEquals(4, exec("return ((List)input.get(\"l\")).size();", vars)); + assertEquals("1", exec("return ((List)input.get(\"l\")).get(0);", vars)); + + Object value = exec("return ((List)input.get(\"l\")).get(3);", vars); + obj1 = (Map)value; + assertEquals("value1", obj1.get("prop1")); + assertEquals("value2", ((Map)obj1.get("obj2")).get("prop2")); + + assertEquals("value1", exec("return ((Map)((List)input.get(\"l\")).get(3)).get(\"prop1\");", vars)); + } + + public void testChangingVarsCrossExecution1() { + Map vars = new HashMap<>(); + Map ctx = new HashMap<>(); + vars.put("ctx", ctx); + + Object compiledScript = scriptEngine.compile("return 
((Map)input.get(\"ctx\")).get(\"value\");"); + ExecutableScript script = scriptEngine.executable(new CompiledScript(ScriptService.ScriptType.INLINE, + "testChangingVarsCrossExecution1", "plan-a", compiledScript), vars); + + ctx.put("value", 1); + Object o = script.run(); + assertEquals(1, ((Number) o).intValue()); + + ctx.put("value", 2); + o = script.run(); + assertEquals(2, ((Number) o).intValue()); + } + + public void testChangingVarsCrossExecution2() { + Map vars = new HashMap<>(); + Object compiledScript = scriptEngine.compile("return input.get(\"value\");"); + + ExecutableScript script = scriptEngine.executable(new CompiledScript(ScriptService.ScriptType.INLINE, + "testChangingVarsCrossExecution2", "plan-a", compiledScript), vars); + + script.setNextVar("value", 1); + Object value = script.run(); + assertEquals(1, ((Number)value).intValue()); + + script.setNextVar("value", 2); + value = script.run(); + assertEquals(2, ((Number)value).intValue()); + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java new file mode 100644 index 00000000000..253e37183f3 --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plan.a; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.script.CompiledScript; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.util.Map; + +/** + * Base test case for scripting unit tests. + *

+ * Typically just asserts the output of {@code exec()} + */ +public abstract class ScriptTestCase extends ESTestCase { + protected PlanAScriptEngineService scriptEngine; + + /** Override to provide different compiler settings */ + protected Settings getSettings() { + Settings.Builder builder = Settings.builder(); + builder.put(PlanAScriptEngineService.NUMERIC_OVERFLOW, random().nextBoolean()); + return builder.build(); + } + + @Before + public void setup() { + scriptEngine = new PlanAScriptEngineService(getSettings()); + } + + /** Compiles and returns the result of {@code script} */ + public Object exec(String script) { + return exec(script, null); + } + + /** Compiles and returns the result of {@code script} with access to {@code vars} */ + public Object exec(String script, Map vars) { + Object object = scriptEngine.compile(script); + CompiledScript compiled = new CompiledScript(ScriptService.ScriptType.INLINE, getTestName(), "plan-a", object); + return scriptEngine.executable(compiled, vars).run(); + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/StringTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/StringTests.java new file mode 100644 index 00000000000..0fbcaa1e6d3 --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/StringTests.java @@ -0,0 +1,75 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plan.a; + +public class StringTests extends ScriptTestCase { + + public void testAppend() { + // boolean + assertEquals("cat" + true, exec("String s = \"cat\"; return s + true;")); + // byte + assertEquals("cat" + (byte)3, exec("String s = \"cat\"; return s + (byte)3;")); + // short + assertEquals("cat" + (short)3, exec("String s = \"cat\"; return s + (short)3;")); + // char + assertEquals("cat" + 't', exec("String s = \"cat\"; return s + 't';")); + assertEquals("cat" + (char)40, exec("String s = \"cat\"; return s + (char)40;")); + // int + assertEquals("cat" + 2, exec("String s = \"cat\"; return s + 2;")); + // long + assertEquals("cat" + 2L, exec("String s = \"cat\"; return s + 2L;")); + // float + assertEquals("cat" + 2F, exec("String s = \"cat\"; return s + 2F;")); + // double + assertEquals("cat" + 2.0, exec("String s = \"cat\"; return s + 2.0;")); + // String + assertEquals("cat" + "cat", exec("String s = \"cat\"; return s + s;")); + } + + public void testStringAPI() { + assertEquals("", exec("return new String();")); + assertEquals('x', exec("String s = \"x\"; return s.charAt(0);")); + assertEquals(120, exec("String s = \"x\"; return s.codePointAt(0);")); + assertEquals(0, exec("String s = \"x\"; return s.compareTo(\"x\");")); + assertEquals("xx", exec("String s = \"x\"; return s.concat(\"x\");")); + assertEquals(true, exec("String s = \"xy\"; return s.endsWith(\"y\");")); + assertEquals(2, exec("String t = \"abcde\"; return t.indexOf(\"cd\", 1);")); + assertEquals(false, exec("String t = \"abcde\"; 
return t.isEmpty();")); + assertEquals(5, exec("String t = \"abcde\"; return t.length();")); + assertEquals("cdcde", exec("String t = \"abcde\"; return t.replace(\"ab\", \"cd\");")); + assertEquals(false, exec("String s = \"xy\"; return s.startsWith(\"y\");")); + assertEquals("e", exec("String t = \"abcde\"; return t.substring(4, 5);")); + assertEquals(97, ((char[])exec("String s = \"a\"; return s.toCharArray();"))[0]); + assertEquals("a", exec("String s = \" a \"; return s.trim();")); + assertEquals('x', exec("return \"x\".charAt(0);")); + assertEquals(120, exec("return \"x\".codePointAt(0);")); + assertEquals(0, exec("return \"x\".compareTo(\"x\");")); + assertEquals("xx", exec("return \"x\".concat(\"x\");")); + assertEquals(true, exec("return \"xy\".endsWith(\"y\");")); + assertEquals(2, exec("return \"abcde\".indexOf(\"cd\", 1);")); + assertEquals(false, exec("return \"abcde\".isEmpty();")); + assertEquals(5, exec("return \"abcde\".length();")); + assertEquals("cdcde", exec("return \"abcde\".replace(\"ab\", \"cd\");")); + assertEquals(false, exec("return \"xy\".startsWith(\"y\");")); + assertEquals("e", exec("return \"abcde\".substring(4, 5);")); + assertEquals(97, ((char[])exec("return \"a\".toCharArray();"))[0]); + assertEquals("a", exec("return \" a \".trim();")); + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/SubtractionTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/SubtractionTests.java new file mode 100644 index 00000000000..1acd0458b52 --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/SubtractionTests.java @@ -0,0 +1,179 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plan.a; + +/** Tests for subtraction operator across all types */ +//TODO: NaN/Inf/overflow/... +public class SubtractionTests extends ScriptTestCase { + + public void testInt() throws Exception { + assertEquals(1-1, exec("int x = 1; int y = 1; return x-y;")); + assertEquals(2-3, exec("int x = 2; int y = 3; return x-y;")); + assertEquals(5-10, exec("int x = 5; int y = 10; return x-y;")); + assertEquals(1-1-2, exec("int x = 1; int y = 1; int z = 2; return x-y-z;")); + assertEquals((1-1)-2, exec("int x = 1; int y = 1; int z = 2; return (x-y)-z;")); + assertEquals(1-(1-2), exec("int x = 1; int y = 1; int z = 2; return x-(y-z);")); + assertEquals(10-0, exec("int x = 10; int y = 0; return x-y;")); + assertEquals(0-0, exec("int x = 0; int y = 0; return x-x;")); + } + + public void testIntConst() throws Exception { + assertEquals(1-1, exec("return 1-1;")); + assertEquals(2-3, exec("return 2-3;")); + assertEquals(5-10, exec("return 5-10;")); + assertEquals(1-1-2, exec("return 1-1-2;")); + assertEquals((1-1)-2, exec("return (1-1)-2;")); + assertEquals(1-(1-2), exec("return 1-(1-2);")); + assertEquals(10-0, exec("return 10-0;")); + assertEquals(0-0, exec("return 0-0;")); + } + + public void testByte() throws Exception { + assertEquals((byte)1-(byte)1, exec("byte x = 1; byte y = 1; return x-y;")); + assertEquals((byte)2-(byte)3, exec("byte x = 2; byte y = 
3; return x-y;")); + assertEquals((byte)5-(byte)10, exec("byte x = 5; byte y = 10; return x-y;")); + assertEquals((byte)1-(byte)1-(byte)2, exec("byte x = 1; byte y = 1; byte z = 2; return x-y-z;")); + assertEquals(((byte)1-(byte)1)-(byte)2, exec("byte x = 1; byte y = 1; byte z = 2; return (x-y)-z;")); + assertEquals((byte)1-((byte)1-(byte)2), exec("byte x = 1; byte y = 1; byte z = 2; return x-(y-z);")); + assertEquals((byte)10-(byte)1, exec("byte x = 10; byte y = 1; return x-y;")); + assertEquals((byte)0-(byte)0, exec("byte x = 0; byte y = 0; return x-y;")); + } + + public void testByteConst() throws Exception { + assertEquals((byte)1-(byte)1, exec("return (byte)1-(byte)1;")); + assertEquals((byte)2-(byte)3, exec("return (byte)2-(byte)3;")); + assertEquals((byte)5-(byte)10, exec("return (byte)5-(byte)10;")); + assertEquals((byte)1-(byte)1-(byte)2, exec("return (byte)1-(byte)1-(byte)2;")); + assertEquals(((byte)1-(byte)1)-(byte)2, exec("return ((byte)1-(byte)1)-(byte)2;")); + assertEquals((byte)1-((byte)1-(byte)2), exec("return (byte)1-((byte)1-(byte)2);")); + assertEquals((byte)10-(byte)1, exec("return (byte)10-(byte)1;")); + assertEquals((byte)0-(byte)0, exec("return (byte)0-(byte)0;")); + } + + public void testChar() throws Exception { + assertEquals((char)1-(char)1, exec("char x = 1; char y = 1; return x-y;")); + assertEquals((char)2-(char)3, exec("char x = 2; char y = 3; return x-y;")); + assertEquals((char)5-(char)10, exec("char x = 5; char y = 10; return x-y;")); + assertEquals((char)1-(char)1-(char)2, exec("char x = 1; char y = 1; char z = 2; return x-y-z;")); + assertEquals(((char)1-(char)1)-(char)2, exec("char x = 1; char y = 1; char z = 2; return (x-y)-z;")); + assertEquals((char)1-((char)1-(char)2), exec("char x = 1; char y = 1; char z = 2; return x-(y-z);")); + assertEquals((char)10-(char)1, exec("char x = 10; char y = 1; return x-y;")); + assertEquals((char)0-(char)0, exec("char x = 0; char y = 0; return x-y;")); + } + + public void testCharConst() 
throws Exception { + assertEquals((char)1-(char)1, exec("return (char)1-(char)1;")); + assertEquals((char)2-(char)3, exec("return (char)2-(char)3;")); + assertEquals((char)5-(char)10, exec("return (char)5-(char)10;")); + assertEquals((char)1-(char)1-(char)2, exec("return (char)1-(char)1-(char)2;")); + assertEquals(((char)1-(char)1)-(char)2, exec("return ((char)1-(char)1)-(char)2;")); + assertEquals((char)1-((char)1-(char)2), exec("return (char)1-((char)1-(char)2);")); + assertEquals((char)10-(char)1, exec("return (char)10-(char)1;")); + assertEquals((char)0-(char)0, exec("return (char)0-(char)0;")); + } + + public void testShort() throws Exception { + assertEquals((short)1-(short)1, exec("short x = 1; short y = 1; return x-y;")); + assertEquals((short)2-(short)3, exec("short x = 2; short y = 3; return x-y;")); + assertEquals((short)5-(short)10, exec("short x = 5; short y = 10; return x-y;")); + assertEquals((short)1-(short)1-(short)2, exec("short x = 1; short y = 1; short z = 2; return x-y-z;")); + assertEquals(((short)1-(short)1)-(short)2, exec("short x = 1; short y = 1; short z = 2; return (x-y)-z;")); + assertEquals((short)1-((short)1-(short)2), exec("short x = 1; short y = 1; short z = 2; return x-(y-z);")); + assertEquals((short)10-(short)1, exec("short x = 10; short y = 1; return x-y;")); + assertEquals((short)0-(short)0, exec("short x = 0; short y = 0; return x-y;")); + } + + public void testShortConst() throws Exception { + assertEquals((short)1-(short)1, exec("return (short)1-(short)1;")); + assertEquals((short)2-(short)3, exec("return (short)2-(short)3;")); + assertEquals((short)5-(short)10, exec("return (short)5-(short)10;")); + assertEquals((short)1-(short)1-(short)2, exec("return (short)1-(short)1-(short)2;")); + assertEquals(((short)1-(short)1)-(short)2, exec("return ((short)1-(short)1)-(short)2;")); + assertEquals((short)1-((short)1-(short)2), exec("return (short)1-((short)1-(short)2);")); + assertEquals((short)10-(short)1, exec("return 
(short)10-(short)1;")); + assertEquals((short)0-(short)0, exec("return (short)0-(short)0;")); + } + + public void testLong() throws Exception { + assertEquals(1L-1L, exec("long x = 1; long y = 1; return x-y;")); + assertEquals(2L-3L, exec("long x = 2; long y = 3; return x-y;")); + assertEquals(5L-10L, exec("long x = 5; long y = 10; return x-y;")); + assertEquals(1L-1L-2L, exec("long x = 1; long y = 1; int z = 2; return x-y-z;")); + assertEquals((1L-1L)-2L, exec("long x = 1; long y = 1; int z = 2; return (x-y)-z;")); + assertEquals(1L-(1L-2L), exec("long x = 1; long y = 1; int z = 2; return x-(y-z);")); + assertEquals(10L-0L, exec("long x = 10; long y = 0; return x-y;")); + assertEquals(0L-0L, exec("long x = 0; long y = 0; return x-x;")); + } + + public void testLongConst() throws Exception { + assertEquals(1L-1L, exec("return 1L-1L;")); + assertEquals(2L-3L, exec("return 2L-3L;")); + assertEquals(5L-10L, exec("return 5L-10L;")); + assertEquals(1L-1L-2L, exec("return 1L-1L-2L;")); + assertEquals((1L-1L)-2L, exec("return (1L-1L)-2L;")); + assertEquals(1L-(1L-2L), exec("return 1L-(1L-2L);")); + assertEquals(10L-0L, exec("return 10L-0L;")); + assertEquals(0L-0L, exec("return 0L-0L;")); + } + + public void testFloat() throws Exception { + assertEquals(1F-1F, exec("float x = 1; float y = 1; return x-y;")); + assertEquals(2F-3F, exec("float x = 2; float y = 3; return x-y;")); + assertEquals(5F-10F, exec("float x = 5; float y = 10; return x-y;")); + assertEquals(1F-1F-2F, exec("float x = 1; float y = 1; float z = 2; return x-y-z;")); + assertEquals((1F-1F)-2F, exec("float x = 1; float y = 1; float z = 2; return (x-y)-z;")); + assertEquals(1F-(1F-2F), exec("float x = 1; float y = 1; float z = 2; return x-(y-z);")); + assertEquals(10F-0F, exec("float x = 10; float y = 0; return x-y;")); + assertEquals(0F-0F, exec("float x = 0; float y = 0; return x-x;")); + } + + public void testFloatConst() throws Exception { + assertEquals(1F-1F, exec("return 1F-1F;")); + 
assertEquals(2F-3F, exec("return 2F-3F;")); + assertEquals(5F-10F, exec("return 5F-10F;")); + assertEquals(1F-1F-2F, exec("return 1F-1F-2F;")); + assertEquals((1F-1F)-2F, exec("return (1F-1F)-2F;")); + assertEquals(1F-(1F-2F), exec("return 1F-(1F-2F);")); + assertEquals(10F-0F, exec("return 10F-0F;")); + assertEquals(0F-0F, exec("return 0F-0F;")); + } + + public void testDouble() throws Exception { + assertEquals(1D-1D, exec("double x = 1; double y = 1; return x-y;")); + assertEquals(2D-3D, exec("double x = 2; double y = 3; return x-y;")); + assertEquals(5D-10D, exec("double x = 5; double y = 10; return x-y;")); + assertEquals(1D-1D-2D, exec("double x = 1; double y = 1; double z = 2; return x-y-z;")); + assertEquals((1D-1D)-2D, exec("double x = 1; double y = 1; double z = 2; return (x-y)-z;")); + assertEquals(1D-(1D-2D), exec("double x = 1; double y = 1; double z = 2; return x-(y-z);")); + assertEquals(10D-0D, exec("double x = 10; float y = 0; return x-y;")); + assertEquals(0D-0D, exec("double x = 0; float y = 0; return x-x;")); + } + + public void testyDoubleConst() throws Exception { + assertEquals(1.0-1.0, exec("return 1.0-1.0;")); + assertEquals(2.0-3.0, exec("return 2.0-3.0;")); + assertEquals(5.0-10.0, exec("return 5.0-10.0;")); + assertEquals(1.0-1.0-2.0, exec("return 1.0-1.0-2.0;")); + assertEquals((1.0-1.0)-2.0, exec("return (1.0-1.0)-2.0;")); + assertEquals(1.0-(1.0-2.0), exec("return 1.0-(1.0-2.0);")); + assertEquals(10.0-0.0, exec("return 10.0-0.0;")); + assertEquals(0.0-0.0, exec("return 0.0-0.0;")); + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/UnaryTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/UnaryTests.java new file mode 100644 index 00000000000..c0199ffadd5 --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/UnaryTests.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plan.a; + +/** Tests for unary operators across different types */ +public class UnaryTests extends ScriptTestCase { + + /** basic tests */ + public void testBasics() { + assertEquals(false, exec("return !true;")); + assertEquals(true, exec("boolean x = false; return !x;")); + assertEquals(-2, exec("return ~1;")); + assertEquals(-2, exec("byte x = 1; return ~x;")); + assertEquals(1, exec("return +1;")); + assertEquals(1.0, exec("double x = 1; return +x;")); + assertEquals(-1, exec("return -1;")); + assertEquals(-2, exec("short x = 2; return -x;")); + } + + public void testNegationInt() throws Exception { + assertEquals(-1, exec("return -1;")); + assertEquals(1, exec("return -(-1);")); + assertEquals(0, exec("return -0;")); + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/UtilityTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/UtilityTests.java new file mode 100644 index 00000000000..5c9fe20d1a7 --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/UtilityTests.java @@ -0,0 +1,250 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plan.a; + +import org.elasticsearch.test.ESTestCase; + +/** + * Tests utility methods (typically built-ins) + */ +public class UtilityTests extends ESTestCase { + + public void testDivideWithoutOverflowInt() { + assertEquals(5 / 2, Utility.divideWithoutOverflow(5, 2)); + + try { + Utility.divideWithoutOverflow(Integer.MIN_VALUE, -1); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + Utility.divideWithoutOverflow(5, 0); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testDivideWithoutOverflowLong() { + assertEquals(5L / 2L, Utility.divideWithoutOverflow(5L, 2L)); + + try { + Utility.divideWithoutOverflow(Long.MIN_VALUE, -1L); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + Utility.divideWithoutOverflow(5L, 0L); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testToByteExact() { + for (int b = Byte.MIN_VALUE; b < Byte.MAX_VALUE; b++) { + assertEquals((byte)b, Utility.toByteExact(b)); + } + + try { + Utility.toByteExact(Byte.MIN_VALUE - 1); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + 
try { + Utility.toByteExact(Byte.MAX_VALUE + 1); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testToShortExact() { + for (int s = Short.MIN_VALUE; s < Short.MAX_VALUE; s++) { + assertEquals((short)s, Utility.toShortExact(s)); + } + + try { + Utility.toShortExact(Short.MIN_VALUE - 1); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + Utility.toShortExact(Short.MAX_VALUE + 1); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testToCharExact() { + for (int c = Character.MIN_VALUE; c < Character.MAX_VALUE; c++) { + assertEquals((char)c, Utility.toCharExact(c)); + } + + try { + Utility.toCharExact(Character.MIN_VALUE - 1); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + Utility.toCharExact(Character.MAX_VALUE + 1); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testAddWithoutOverflowFloat() { + assertEquals(10F, Utility.addWithoutOverflow(5F, 5F), 0F); + assertTrue(Float.isNaN(Utility.addWithoutOverflow(5F, Float.NaN))); + assertTrue(Float.isNaN(Utility.addWithoutOverflow(Float.POSITIVE_INFINITY, Float.NEGATIVE_INFINITY))); + + try { + Utility.addWithoutOverflow(Float.MAX_VALUE, Float.MAX_VALUE); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + Utility.addWithoutOverflow(-Float.MAX_VALUE, -Float.MAX_VALUE); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testAddWithoutOverflowDouble() { + assertEquals(10D, Utility.addWithoutOverflow(5D, 5D), 0D); + assertTrue(Double.isNaN(Utility.addWithoutOverflow(5D, Double.NaN))); + assertTrue(Double.isNaN(Utility.addWithoutOverflow(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY))); + + try { + Utility.addWithoutOverflow(Double.MAX_VALUE, 
Double.MAX_VALUE); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + Utility.addWithoutOverflow(-Double.MAX_VALUE, -Double.MAX_VALUE); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testSubtractWithoutOverflowFloat() { + assertEquals(5F, Utility.subtractWithoutOverflow(10F, 5F), 0F); + assertTrue(Float.isNaN(Utility.subtractWithoutOverflow(5F, Float.NaN))); + assertTrue(Float.isNaN(Utility.subtractWithoutOverflow(Float.POSITIVE_INFINITY, Float.POSITIVE_INFINITY))); + + try { + Utility.subtractWithoutOverflow(Float.MAX_VALUE, -Float.MAX_VALUE); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + Utility.subtractWithoutOverflow(-Float.MAX_VALUE, Float.MAX_VALUE); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testSubtractWithoutOverflowDouble() { + assertEquals(5D, Utility.subtractWithoutOverflow(10D, 5D), 0D); + assertTrue(Double.isNaN(Utility.subtractWithoutOverflow(5D, Double.NaN))); + assertTrue(Double.isNaN(Utility.subtractWithoutOverflow(Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY))); + + try { + Utility.subtractWithoutOverflow(Double.MAX_VALUE, -Double.MAX_VALUE); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + Utility.subtractWithoutOverflow(-Double.MAX_VALUE, Double.MAX_VALUE); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testMultiplyWithoutOverflowFloat() { + assertEquals(25F, Utility.multiplyWithoutOverflow(5F, 5F), 0F); + assertTrue(Float.isNaN(Utility.multiplyWithoutOverflow(5F, Float.NaN))); + assertEquals(Float.POSITIVE_INFINITY, Utility.multiplyWithoutOverflow(5F, Float.POSITIVE_INFINITY), 0F); + + try { + Utility.multiplyWithoutOverflow(Float.MAX_VALUE, Float.MAX_VALUE); + fail("did not get expected exception"); + } catch 
(ArithmeticException expected) {} + } + + public void testMultiplyWithoutOverflowDouble() { + assertEquals(25D, Utility.multiplyWithoutOverflow(5D, 5D), 0D); + assertTrue(Double.isNaN(Utility.multiplyWithoutOverflow(5D, Double.NaN))); + assertEquals(Double.POSITIVE_INFINITY, Utility.multiplyWithoutOverflow(5D, Double.POSITIVE_INFINITY), 0D); + + try { + Utility.multiplyWithoutOverflow(Double.MAX_VALUE, Double.MAX_VALUE); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testDivideWithoutOverflowFloat() { + assertEquals(5F, Utility.divideWithoutOverflow(25F, 5F), 0F); + assertTrue(Float.isNaN(Utility.divideWithoutOverflow(5F, Float.NaN))); + assertEquals(Float.POSITIVE_INFINITY, Utility.divideWithoutOverflow(Float.POSITIVE_INFINITY, 5F), 0F); + + try { + Utility.divideWithoutOverflow(Float.MAX_VALUE, Float.MIN_VALUE); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + Utility.divideWithoutOverflow(0F, 0F); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + Utility.divideWithoutOverflow(5F, 0F); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testDivideWithoutOverflowDouble() { + assertEquals(5D, Utility.divideWithoutOverflow(25D, 5D), 0D); + assertTrue(Double.isNaN(Utility.divideWithoutOverflow(5D, Double.NaN))); + assertEquals(Double.POSITIVE_INFINITY, Utility.divideWithoutOverflow(Double.POSITIVE_INFINITY, 5D), 0D); + + try { + Utility.divideWithoutOverflow(Double.MAX_VALUE, Double.MIN_VALUE); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + Utility.divideWithoutOverflow(0D, 0D); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + + try { + Utility.divideWithoutOverflow(5D, 0D); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + } + + 
public void testRemainderWithoutOverflowFloat() { + assertEquals(1F, Utility.remainderWithoutOverflow(25F, 4F), 0F); + + try { + Utility.remainderWithoutOverflow(5F, 0F); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + } + + public void testRemainderWithoutOverflowDouble() { + assertEquals(1D, Utility.remainderWithoutOverflow(25D, 4D), 0D); + + try { + Utility.remainderWithoutOverflow(5D, 0D); + fail("did not get expected exception"); + } catch (ArithmeticException expected) {} + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java new file mode 100644 index 00000000000..de2c1c9ea3e --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plan.a; + +public class WhenThingsGoWrongTests extends ScriptTestCase { + public void testNullPointer() { + try { + exec("int x = (int) ((Map) input).get(\"missing\"); return x;"); + fail("should have hit npe"); + } catch (NullPointerException expected) {} + } + + public void testInvalidShift() { + try { + exec("float x = 15F; x <<= 2; return x;"); + fail("should have hit cce"); + } catch (ClassCastException expected) {} + + try { + exec("double x = 15F; x <<= 2; return x;"); + fail("should have hit cce"); + } catch (ClassCastException expected) {} + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/XorTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/XorTests.java new file mode 100644 index 00000000000..f10477dcd0a --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/XorTests.java @@ -0,0 +1,62 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plan.a; + +/** Tests for xor operator across all types */ +public class XorTests extends ScriptTestCase { + + public void testInt() throws Exception { + assertEquals(5 ^ 12, exec("int x = 5; int y = 12; return x ^ y;")); + assertEquals(5 ^ -12, exec("int x = 5; int y = -12; return x ^ y;")); + assertEquals(7 ^ 15 ^ 3, exec("int x = 7; int y = 15; int z = 3; return x ^ y ^ z;")); + } + + public void testIntConst() throws Exception { + assertEquals(5 ^ 12, exec("return 5 ^ 12;")); + assertEquals(5 ^ -12, exec("return 5 ^ -12;")); + assertEquals(7 ^ 15 ^ 3, exec("return 7 ^ 15 ^ 3;")); + } + + public void testLong() throws Exception { + assertEquals(5L ^ 12L, exec("long x = 5; long y = 12; return x ^ y;")); + assertEquals(5L ^ -12L, exec("long x = 5; long y = -12; return x ^ y;")); + assertEquals(7L ^ 15L ^ 3L, exec("long x = 7; long y = 15; long z = 3; return x ^ y ^ z;")); + } + + public void testLongConst() throws Exception { + assertEquals(5L ^ 12L, exec("return 5L ^ 12L;")); + assertEquals(5L ^ -12L, exec("return 5L ^ -12L;")); + assertEquals(7L ^ 15L ^ 3L, exec("return 7L ^ 15L ^ 3L;")); + } + + public void testBool() throws Exception { + assertEquals(false, exec("boolean x = true; boolean y = true; return x ^ y;")); + assertEquals(true, exec("boolean x = true; boolean y = false; return x ^ y;")); + assertEquals(true, exec("boolean x = false; boolean y = true; return x ^ y;")); + assertEquals(false, exec("boolean x = false; boolean y = false; return x ^ y;")); + } + + public void testBoolConst() throws Exception { + assertEquals(false, exec("return true ^ true;")); + assertEquals(true, exec("return true ^ false;")); + assertEquals(true, exec("return false ^ true;")); + assertEquals(false, exec("return false ^ false;")); + } +} diff --git a/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml b/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml new file mode 100644 index 
00000000000..6259780bfb4 --- /dev/null +++ b/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml @@ -0,0 +1,14 @@ +# Integration tests for Plan A Plugin +# +"Plan A plugin loaded": + - do: + cluster.state: {} + + # Get master node id + - set: { master_node: master } + + - do: + nodes.info: {} + + - match: { nodes.$master.plugins.0.name: lang-plan-a } + - match: { nodes.$master.plugins.0.jvm: true } diff --git a/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml b/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml new file mode 100644 index 00000000000..0a5a3a4a8d4 --- /dev/null +++ b/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/20_scriptfield.yaml @@ -0,0 +1,27 @@ +# Integration tests for using a scripted field +# +setup: + - do: + index: + index: test + type: test + id: 1 + body: { "foo": "aaa" } + - do: + indices.refresh: {} + +--- + +"Scripted Field": + - do: + search: + body: + script_fields: + bar: + script: + inline: "input.doc.foo.0 + input.x;" + lang: plan-a + params: + x: "bbb" + + - match: { hits.hits.0.fields.bar.0: "aaabbb"} diff --git a/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml b/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml new file mode 100644 index 00000000000..a8d96a0d6fa --- /dev/null +++ b/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/30_search.yaml @@ -0,0 +1,97 @@ +# Integration tests for Plan-A search scripting +# +"Plan-A Query": + - do: + index: + index: test + type: test + id: 1 + body: { "test": "value beck", "num1": 1.0 } + - do: + index: + index: test + type: test + id: 2 + body: { "test": "value beck", "num1": 2.0 } + - do: + index: + index: test + type: test + id: 3 + body: { "test": "value beck", "num1": 3.0 } + - do: + indices.refresh: {} + + - do: + index: test + search: + body: + query: + script: + script: + inline: 
"input.doc.num1.0 > 1;" + lang: plan-a + script_fields: + sNum1: + script: + inline: "input.doc.num1.0;" + lang: plan-a + sort: + num1: + order: asc + + - match: { hits.total: 2 } + - match: { hits.hits.0.fields.sNum1.0: 2.0 } + - match: { hits.hits.1.fields.sNum1.0: 3.0 } + + - do: + index: test + search: + body: + query: + script: + script: + inline: "input.doc.num1.0 > input.param1;" + lang: plan-a + params: + param1: 1 + + script_fields: + sNum1: + script: + inline: "return input.doc.num1.0;" + lang: plan-a + sort: + num1: + order: asc + + - match: { hits.total: 2 } + - match: { hits.hits.0.fields.sNum1.0: 2.0 } + - match: { hits.hits.1.fields.sNum1.0: 3.0 } + + - do: + index: test + search: + body: + query: + script: + script: + inline: "input.doc.num1.0 > input.param1;" + lang: plan-a + params: + param1: -1 + + script_fields: + sNum1: + script: + inline: "input.doc.num1.0;" + lang: plan-a + sort: + num1: + order: asc + + - match: { hits.total: 3 } + - match: { hits.hits.0.fields.sNum1.0: 1.0 } + - match: { hits.hits.1.fields.sNum1.0: 2.0 } + - match: { hits.hits.2.fields.sNum1.0: 3.0 } + diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java index 34a57d7fbcb..b2b2c0cff5c 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java @@ -631,6 +631,7 @@ public class PluginManagerTests extends ESIntegTestCase { PluginManager.checkForOfficialPlugins("analysis-stempel"); PluginManager.checkForOfficialPlugins("delete-by-query"); PluginManager.checkForOfficialPlugins("lang-javascript"); + PluginManager.checkForOfficialPlugins("lang-plan-a"); PluginManager.checkForOfficialPlugins("lang-python"); PluginManager.checkForOfficialPlugins("mapper-attachments"); PluginManager.checkForOfficialPlugins("mapper-murmur3"); diff --git 
a/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash index 48c34fa03af..54978b39605 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash @@ -223,6 +223,18 @@ fi install_and_check_plugin discovery multicast } +@test "[$GROUP] install lang-expression plugin" { + install_and_check_plugin lang expression +} + +@test "[$GROUP] install lang-groovy plugin" { + install_and_check_plugin lang groovy +} + +@test "[$GROUP] install lang-plan-a plugin" { + install_and_check_plugin lang plan-a +} + @test "[$GROUP] install javascript plugin" { install_and_check_plugin lang javascript rhino-*.jar } @@ -323,6 +335,18 @@ fi remove_plugin discovery-multicast } +@test "[$GROUP] remove lang-expression plugin" { + remove_plugin lang-expression +} + +@test "[$GROUP] remove lang-groovy plugin" { + remove_plugin lang-groovy +} + +@test "[$GROUP] remove lang-plan-a plugin" { + remove_plugin lang-plan-a +} + @test "[$GROUP] remove javascript plugin" { remove_plugin lang-javascript } diff --git a/settings.gradle b/settings.gradle index 0791c3d1752..0515e264a86 100644 --- a/settings.gradle +++ b/settings.gradle @@ -22,6 +22,7 @@ List projects = [ 'plugins:discovery-gce', 'plugins:discovery-multicast', 'plugins:lang-javascript', + 'plugins:lang-plan-a', 'plugins:lang-python', 'plugins:mapper-attachments', 'plugins:mapper-murmur3', From 025e9818e7c3f428aa46187572c40ce7f93fd197 Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Wed, 18 Nov 2015 11:52:47 -0500 Subject: [PATCH 46/57] Refactors TransportReplicationAction to decouple request routing and shard operation logic --- .../indices/flush/ShardFlushRequest.java | 10 +- .../indices/flush/TransportFlushAction.java | 2 +- .../flush/TransportShardFlushAction.java | 28 +- .../refresh/TransportRefreshAction.java | 2 +- 
.../refresh/TransportShardRefreshAction.java | 26 +- .../action/bulk/BulkShardRequest.java | 11 +- .../action/bulk/TransportBulkAction.java | 4 +- .../action/bulk/TransportShardBulkAction.java | 59 +- .../action/delete/TransportDeleteAction.java | 43 +- .../action/index/TransportIndexAction.java | 40 +- .../replication/ReplicationRequest.java | 44 +- .../TransportReplicationAction.java | 785 ++++++++---------- .../TransportMultiTermVectorsAction.java | 5 +- .../cluster/routing/OperationRouting.java | 49 +- .../cluster/routing/RoutingTable.java | 20 + .../TransportReplicationActionTests.java | 265 ++++-- .../RoutingBackwardCompatibilityTests.java | 2 - .../WriteConsistencyLevelIT.java | 4 +- .../index/mapper/MapperServiceTests.java | 11 +- .../messy/tests/IndicesRequestTests.java | 10 +- 20 files changed, 727 insertions(+), 693 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushRequest.java index 10db46c1da0..ccf06be8bd0 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/ShardFlushRequest.java @@ -22,6 +22,7 @@ package org.elasticsearch.action.admin.indices.flush; import org.elasticsearch.action.support.replication.ReplicationRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.shard.ShardId; import java.io.IOException; @@ -29,8 +30,8 @@ public class ShardFlushRequest extends ReplicationRequest { private FlushRequest request = new FlushRequest(); - public ShardFlushRequest(FlushRequest request) { - super(request); + public ShardFlushRequest(FlushRequest request, ShardId shardId) { + super(request, shardId); this.request = request; } @@ -53,5 +54,8 @@ public class ShardFlushRequest extends ReplicationRequest { 
request.writeTo(out); } - + @Override + public String toString() { + return "flush {" + super.toString() + "}"; + } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java index ac159625420..00e03ffdf6e 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java @@ -53,7 +53,7 @@ public class TransportFlushAction extends TransportBroadcastReplicationAction shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) throws Throwable { - IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId.getIndex()).getShard(shardRequest.shardId.id()); - indexShard.flush(shardRequest.request.getRequest()); + protected Tuple shardOperationOnPrimary(MetaData metaData, ShardFlushRequest shardRequest) throws Throwable { + IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId().getIndex()).getShard(shardRequest.shardId().id()); + indexShard.flush(shardRequest.getRequest()); logger.trace("{} flush request executed on primary", indexShard.shardId()); - return new Tuple<>(new ActionWriteResponse(), shardRequest.request); + return new Tuple<>(new ActionWriteResponse(), shardRequest); } @Override - protected void shardOperationOnReplica(ShardId shardId, ShardFlushRequest request) { + protected void shardOperationOnReplica(ShardFlushRequest request) { IndexShard indexShard = indicesService.indexServiceSafe(request.shardId().getIndex()).getShard(request.shardId().id()); indexShard.flush(request.getRequest()); logger.trace("{} flush request executed on replica", indexShard.shardId()); @@ -81,18 +78,13 @@ public class TransportShardFlushAction extends TransportReplicationAction shardOperationOnPrimary(ClusterState clusterState, 
PrimaryOperationRequest shardRequest) throws Throwable { - IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId.getIndex()).getShard(shardRequest.shardId.id()); + protected Tuple shardOperationOnPrimary(MetaData metaData, ReplicationRequest shardRequest) throws Throwable { + IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId().getIndex()).getShard(shardRequest.shardId().id()); indexShard.refresh("api"); logger.trace("{} refresh request executed on primary", indexShard.shardId()); - return new Tuple<>(new ActionWriteResponse(), shardRequest.request); + return new Tuple<>(new ActionWriteResponse(), shardRequest); } @Override - protected void shardOperationOnReplica(ShardId shardId, ReplicationRequest request) { + protected void shardOperationOnReplica(ReplicationRequest request) { + final ShardId shardId = request.shardId(); IndexShard indexShard = indicesService.indexServiceSafe(shardId.getIndex()).getShard(shardId.id()); indexShard.refresh("api"); logger.trace("{} refresh request executed on replica", indexShard.shardId()); @@ -82,18 +81,13 @@ public class TransportShardRefreshAction extends TransportReplicationAction { public BulkShardRequest() { } - BulkShardRequest(BulkRequest bulkRequest, String index, int shardId, boolean refresh, BulkItemRequest[] items) { - super(bulkRequest); - this.index = index; - this.setShardId(new ShardId(index, shardId)); + BulkShardRequest(BulkRequest bulkRequest, ShardId shardId, boolean refresh, BulkItemRequest[] items) { + super(bulkRequest, shardId); this.items = items; this.refresh = refresh; } @@ -93,4 +91,9 @@ public class BulkShardRequest extends ReplicationRequest { } refresh = in.readBoolean(); } + + @Override + public String toString() { + return "shard bulk {" + super.toString() + "}"; + } } diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 
51d32e3ff75..9b18d0328e7 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -275,7 +275,7 @@ public class TransportBulkAction extends HandledTransportAction list = requestsByShard.get(shardId); if (list == null) { list = new ArrayList<>(); @@ -312,7 +312,7 @@ public class TransportBulkAction extends HandledTransportAction> entry : requestsByShard.entrySet()) { final ShardId shardId = entry.getKey(); final List requests = entry.getValue(); - BulkShardRequest bulkShardRequest = new BulkShardRequest(bulkRequest, shardId.index().name(), shardId.id(), bulkRequest.refresh(), requests.toArray(new BulkItemRequest[requests.size()])); + BulkShardRequest bulkShardRequest = new BulkShardRequest(bulkRequest, shardId, bulkRequest.refresh(), requests.toArray(new BulkItemRequest[requests.size()])); bulkShardRequest.consistencyLevel(bulkRequest.consistencyLevel()); bulkShardRequest.timeout(bulkRequest.timeout()); shardBulkAction.execute(bulkShardRequest, new ActionListener() { diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index e51a1b938d8..2cc81556222 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -35,12 +35,11 @@ import org.elasticsearch.action.update.UpdateHelper; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import 
org.elasticsearch.cluster.metadata.MappingMetaData; -import org.elasticsearch.cluster.routing.ShardIterator; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; @@ -87,11 +86,6 @@ public class TransportShardBulkAction extends TransportReplicationAction shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) { - final BulkShardRequest request = shardRequest.request; + protected Tuple shardOperationOnPrimary(MetaData metaData, BulkShardRequest request) { final IndexService indexService = indicesService.indexServiceSafe(request.index()); - final IndexShard indexShard = indexService.getShard(shardRequest.shardId.id()); + final IndexShard indexShard = indexService.getShard(request.shardId().id()); long[] preVersions = new long[request.items().length]; VersionType[] preVersionTypes = new VersionType[request.items().length]; @@ -128,7 +116,7 @@ public class TransportShardBulkAction extends TransportReplicationAction result = shardIndexOperation(request, indexRequest, clusterState, indexShard, true); + WriteResult result = shardIndexOperation(request, indexRequest, metaData, indexShard, true); location = locationToSync(location, result.location); // add the response IndexResponse indexResponse = result.response(); @@ -143,9 +131,9 @@ public class TransportShardBulkAction extends TransportReplicationAction 0) { Tuple> sourceAndContent = XContentHelper.convertToMap(indexSourceAsBytes, true); - updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, shardRequest.request.index(), indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes)); + updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, request.index(), indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes)); } item = 
request.items()[requestIndex] = new BulkItemRequest(request.items()[requestIndex].id(), indexRequest); setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_UPDATE, updateResponse)); @@ -229,7 +217,7 @@ public class TransportShardBulkAction extends TransportReplicationAction(new BulkShardResponse(shardRequest.shardId, responses), shardRequest.request); + return new Tuple<>(new BulkShardResponse(request.shardId(), responses), request); } private void setResponse(BulkItemRequest request, BulkItemResponse response) { @@ -320,11 +308,11 @@ public class TransportShardBulkAction extends TransportReplicationAction shardIndexOperation(BulkShardRequest request, IndexRequest indexRequest, ClusterState clusterState, + private WriteResult shardIndexOperation(BulkShardRequest request, IndexRequest indexRequest, MetaData metaData, IndexShard indexShard, boolean processed) throws Throwable { // validate, if routing is required, that we got routing - MappingMetaData mappingMd = clusterState.metaData().index(request.index()).mappingOrDefault(indexRequest.type()); + MappingMetaData mappingMd = metaData.index(request.index()).mappingOrDefault(indexRequest.type()); if (mappingMd != null && mappingMd.routing().required()) { if (indexRequest.routing() == null) { throw new RoutingMissingException(request.index(), indexRequest.type(), indexRequest.id()); @@ -332,7 +320,7 @@ public class TransportShardBulkAction extends TransportReplicationAction listener) { - request.request().routing(state.metaData().resolveIndexRouting(request.request().routing(), request.request().index())); - if (state.metaData().hasIndex(request.concreteIndex())) { + protected void resolveRequest(final MetaData metaData, String concreteIndex, DeleteRequest request) { + request.routing(metaData.resolveIndexRouting(request.routing(), request.index())); + if (metaData.hasIndex(concreteIndex)) { // check if routing is required, if so, do a broadcast delete - MappingMetaData mappingMd = 
state.metaData().index(request.concreteIndex()).mappingOrDefault(request.request().type()); + MappingMetaData mappingMd = metaData.index(concreteIndex).mappingOrDefault(request.type()); if (mappingMd != null && mappingMd.routing().required()) { - if (request.request().routing() == null) { - if (request.request().versionType() != VersionType.INTERNAL) { + if (request.routing() == null) { + if (request.versionType() != VersionType.INTERNAL) { // TODO: implement this feature - throw new IllegalArgumentException("routing value is required for deleting documents of type [" + request.request().type() - + "] while using version_type [" + request.request().versionType() + "]"); + throw new IllegalArgumentException("routing value is required for deleting documents of type [" + request.type() + + "] while using version_type [" + request.versionType() + "]"); } - throw new RoutingMissingException(request.concreteIndex(), request.request().type(), request.request().id()); + throw new RoutingMissingException(concreteIndex, request.type(), request.id()); } } } + ShardId shardId = clusterService.operationRouting().shardId(clusterService.state(), concreteIndex, request.id(), request.routing()); + request.setShardId(shardId); } private void innerExecute(final DeleteRequest request, final ActionListener listener) { super.doExecute(request, listener); } - @Override - protected boolean checkWriteConsistency() { - return true; - } - @Override protected DeleteResponse newResponseInstance() { return new DeleteResponse(); } @Override - protected Tuple shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) { - DeleteRequest request = shardRequest.request; - IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId.getIndex()).getShard(shardRequest.shardId.id()); + protected Tuple shardOperationOnPrimary(MetaData metaData, DeleteRequest request) { + IndexShard indexShard = 
indicesService.indexServiceSafe(request.shardId().getIndex()).getShard(request.shardId().id()); final WriteResult result = executeDeleteRequestOnPrimary(request, indexShard); processAfterWrite(request.refresh(), indexShard, result.location); - return new Tuple<>(result.response, shardRequest.request); + return new Tuple<>(result.response, request); } public static WriteResult executeDeleteRequestOnPrimary(DeleteRequest request, IndexShard indexShard) { @@ -154,17 +150,12 @@ public class TransportDeleteAction extends TransportReplicationAction indexResponseActionListener) { - MetaData metaData = clusterService.state().metaData(); - + protected void resolveRequest(MetaData metaData, String concreteIndex, IndexRequest request) { MappingMetaData mappingMd = null; - if (metaData.hasIndex(request.concreteIndex())) { - mappingMd = metaData.index(request.concreteIndex()).mappingOrDefault(request.request().type()); + if (metaData.hasIndex(concreteIndex)) { + mappingMd = metaData.index(concreteIndex).mappingOrDefault(request.type()); } - request.request().process(metaData, mappingMd, allowIdGeneration, request.concreteIndex()); + request.process(metaData, mappingMd, allowIdGeneration, concreteIndex); + ShardId shardId = clusterService.operationRouting().shardId(clusterService.state(), concreteIndex, request.id(), request.routing()); + request.setShardId(shardId); } private void innerExecute(final IndexRequest request, final ActionListener listener) { super.doExecute(request, listener); } - @Override - protected boolean checkWriteConsistency() { - return true; - } - @Override protected IndexResponse newResponseInstance() { return new IndexResponse(); } @Override - protected ShardIterator shards(ClusterState clusterState, InternalRequest request) { - return clusterService.operationRouting() - .indexShards(clusterService.state(), request.concreteIndex(), request.request().type(), request.request().id(), request.request().routing()); - } - - @Override - protected Tuple 
shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) throws Throwable { - final IndexRequest request = shardRequest.request; + protected Tuple shardOperationOnPrimary(MetaData metaData, IndexRequest request) throws Throwable { // validate, if routing is required, that we got routing - IndexMetaData indexMetaData = clusterState.metaData().index(shardRequest.shardId.getIndex()); + IndexMetaData indexMetaData = metaData.index(request.shardId().getIndex()); MappingMetaData mappingMd = indexMetaData.mappingOrDefault(request.type()); if (mappingMd != null && mappingMd.routing().required()) { if (request.routing() == null) { - throw new RoutingMissingException(shardRequest.shardId.getIndex(), request.type(), request.id()); + throw new RoutingMissingException(request.shardId().getIndex(), request.type(), request.id()); } } - IndexService indexService = indicesService.indexServiceSafe(shardRequest.shardId.getIndex()); - IndexShard indexShard = indexService.getShard(shardRequest.shardId.id()); + IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex()); + IndexShard indexShard = indexService.getShard(request.shardId().id()); final WriteResult result = executeIndexRequestOnPrimary(request, indexShard, mappingUpdatedAction); final IndexResponse response = result.response; final Translog.Location location = result.location; processAfterWrite(request.refresh(), indexShard, location); - return new Tuple<>(response, shardRequest.request); + return new Tuple<>(response, request); } @Override - protected void shardOperationOnReplica(ShardId shardId, IndexRequest request) { + protected void shardOperationOnReplica(IndexRequest request) { + final ShardId shardId = request.shardId(); IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); IndexShard indexShard = indexService.getShard(shardId.id()); final Engine.Index operation = executeIndexRequestOnReplica(request, indexShard); diff --git 
a/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java b/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java index c629a70d6f9..adbe199824e 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java @@ -42,7 +42,12 @@ public class ReplicationRequest extends ActionRequ public static final TimeValue DEFAULT_TIMEOUT = new TimeValue(1, TimeUnit.MINUTES); - ShardId internalShardId; + /** + * Target shard the request should execute on. In case of index and delete requests, + * shard id gets resolved by the transport action before performing request operation + * and at request creation time for shard-level bulk, refresh and flush requests. + */ + protected ShardId shardId; protected TimeValue timeout = DEFAULT_TIMEOUT; protected String index; @@ -60,6 +65,15 @@ public class ReplicationRequest extends ActionRequ super(request); } + /** + * Creates a new request with resolved shard id + */ + public ReplicationRequest(ActionRequest request, ShardId shardId) { + super(request); + this.index = shardId.getIndex(); + this.shardId = shardId; + } + /** * Copy constructor that creates a new request that is a copy of the one provided as an argument. */ @@ -124,12 +138,12 @@ public class ReplicationRequest extends ActionRequ /** * @return the shardId of the shard where this operation should be executed on. - * can be null in case the shardId is determined by a single document (index, type, id) for example for index or delete request. 
+ * can be null if the shardID has not yet been resolved */ public @Nullable ShardId shardId() { - return internalShardId; + return shardId; } /** @@ -154,9 +168,9 @@ public class ReplicationRequest extends ActionRequ public void readFrom(StreamInput in) throws IOException { super.readFrom(in); if (in.readBoolean()) { - internalShardId = ShardId.readShardId(in); + shardId = ShardId.readShardId(in); } else { - internalShardId = null; + shardId = null; } consistencyLevel = WriteConsistencyLevel.fromId(in.readByte()); timeout = TimeValue.readTimeValue(in); @@ -166,9 +180,9 @@ public class ReplicationRequest extends ActionRequ @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (internalShardId != null) { + if (shardId != null) { out.writeBoolean(true); - internalShardId.writeTo(out); + shardId.writeTo(out); } else { out.writeBoolean(false); } @@ -177,9 +191,21 @@ public class ReplicationRequest extends ActionRequ out.writeString(index); } + /** + * Sets the target shard id for the request. 
The shard id is set when a + * index/delete request is resolved by the transport action + */ public T setShardId(ShardId shardId) { - this.internalShardId = shardId; - this.index = shardId.getIndex(); + this.shardId = shardId; return (T) this; } + + @Override + public String toString() { + if (shardId != null) { + return shardId.toString(); + } else { + return index; + } + } } diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 3b4d860f31e..2f9fd6d483c 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -37,11 +37,10 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.IndexRoutingTable; -import org.elasticsearch.cluster.routing.IndexShardRoutingTable; -import org.elasticsearch.cluster.routing.ShardIterator; -import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.*; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.StreamInput; @@ -63,6 +62,8 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.*; import java.io.IOException; +import java.util.Collections; +import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicBoolean; @@ -70,6 
+71,12 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Supplier; /** + * Base class for requests that should be executed on a primary copy followed by replica copies. + * Subclasses can resolve the target shard and provide implementation for primary and replica operations. + * + * The action samples cluster state on the receiving node to reroute to node with primary copy and on the + * primary node to validate request before primary operation followed by sampling state again for resolving + * nodes with replica copies to perform replication. */ public abstract class TransportReplicationAction extends TransportAction { @@ -85,6 +92,7 @@ public abstract class TransportReplicationAction listener) { - new PrimaryPhase(request, listener).run(); + new ReroutePhase(request, listener).run(); } protected abstract Response newResponseInstance(); /** + * Resolves the target shard id of the incoming request. + * Additional processing or validation of the request should be done here. + */ + protected void resolveRequest(MetaData metaData, String concreteIndex, Request request) { + // implementation should be provided if request shardID is not already resolved at request construction + } + + /** + * Primary operation on node with primary copy, the provided metadata should be used for request validation if needed * @return A tuple containing not null values, as first value the result of the primary operation and as second value * the request to be executed on the replica shards. 
*/ - protected abstract Tuple shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) throws Throwable; + protected abstract Tuple shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Throwable; - protected abstract void shardOperationOnReplica(ShardId shardId, ReplicaRequest shardRequest); + /** + * Replica operation on nodes with replica copies + */ + protected abstract void shardOperationOnReplica(ReplicaRequest shardRequest); - protected abstract ShardIterator shards(ClusterState clusterState, InternalRequest request); - - protected abstract boolean checkWriteConsistency(); - - protected ClusterBlockException checkGlobalBlock(ClusterState state) { - return state.blocks().globalBlockedException(ClusterBlockLevel.WRITE); - } - - protected ClusterBlockException checkRequestBlock(ClusterState state, InternalRequest request) { - return state.blocks().indexBlockedException(ClusterBlockLevel.WRITE, request.concreteIndex()); - } - - protected boolean resolveIndex() { + /** + * True if write consistency should be checked for an implementation + */ + protected boolean checkWriteConsistency() { return true; } /** - * Resolves the request, by default doing nothing. 
Can be subclassed to do - * additional processing or validation depending on the incoming request + * Cluster level block to check before request execution */ - protected void resolveRequest(ClusterState state, InternalRequest request, ActionListener listener) { + protected ClusterBlockLevel globalBlockLevel() { + return ClusterBlockLevel.WRITE; + } + + /** + * Index level block to check before request execution + */ + protected ClusterBlockLevel indexBlockLevel() { + return ClusterBlockLevel.WRITE; + } + + /** + * True if provided index should be resolved when resolving request + */ + protected boolean resolveIndex() { + return true; } protected TransportRequestOptions transportOptions() { @@ -233,6 +259,13 @@ public abstract class TransportReplicationAction { + @Override + public void messageReceived(final Request request, final TransportChannel channel) throws Exception { + new PrimaryPhase(request, channel).run(); + } + } + class ReplicaOperationTransportHandler implements TransportRequestHandler { @Override public void messageReceived(final ReplicaRequest request, final TransportChannel channel) throws Exception { @@ -259,7 +292,6 @@ public abstract class TransportReplicationAction - * Note that as soon as we start sending request to replicas, state responsibility is transferred to {@link ReplicationPhase} + * Responsible for routing and retrying failed operations on the primary. + * The actual primary operation is done in {@link PrimaryPhase} on the + * node with primary copy. 
+ * + * Resolves index and shard id for the request before routing it to target node */ - final class PrimaryPhase extends AbstractRunnable { + final class ReroutePhase extends AbstractRunnable { private final ActionListener listener; - private final InternalRequest internalRequest; + private final Request request; private final ClusterStateObserver observer; - private final AtomicBoolean finished = new AtomicBoolean(false); - private volatile Releasable indexShardReference; + private final AtomicBoolean finished = new AtomicBoolean(); - PrimaryPhase(Request request, ActionListener listener) { - this.internalRequest = new InternalRequest(request); + ReroutePhase(Request request, ActionListener listener) { + this.request = request; this.listener = listener; - this.observer = new ClusterStateObserver(clusterService, internalRequest.request().timeout(), logger); + this.observer = new ClusterStateObserver(clusterService, request.timeout(), logger); } @Override @@ -361,137 +405,93 @@ public abstract class TransportReplicationAction() { - - @Override - public Response newInstance() { - return newResponseInstance(); - } - - @Override - public String executor() { - return ThreadPool.Names.SAME; - } - - @Override - public void handleResponse(Response response) { - finishOnRemoteSuccess(response); - } - - @Override - public void handleException(TransportException exp) { - try { - // if we got disconnected from the node, or the node / shard is not in the right state (being closed) - if (exp.unwrapCause() instanceof ConnectTransportException || exp.unwrapCause() instanceof NodeClosedException || - retryPrimaryException(exp)) { - // we already marked it as started when we executed it (removed the listener) so pass false - // to re-add to the cluster listener - logger.trace("received an error from node the primary was assigned to ({}), scheduling a retry", exp.getMessage()); - retry(exp); - } else { - finishAsFailed(exp); - } - } catch (Throwable t) { - 
finishWithUnexpectedFailure(t); - } - } - }); + if (logger.isTraceEnabled()) { + logger.trace("send action [{}] on primary [{}] for request [{}] with cluster state version [{}] to [{}]", actionName, request.shardId(), request, state.version(), primary.currentNodeId()); + } + performAction(node, actionName, false); } } + private void handleBlockException(ClusterBlockException blockException) { + if (blockException.retryable()) { + logger.trace("cluster is blocked ({}), scheduling a retry", blockException.getMessage()); + retry(blockException); + } else { + finishAsFailed(blockException); + } + } + + private void performAction(final DiscoveryNode node, final String action, final boolean isPrimaryAction) { + transportService.sendRequest(node, action, request, transportOptions, new BaseTransportResponseHandler() { + + @Override + public Response newInstance() { + return newResponseInstance(); + } + + @Override + public String executor() { + return ThreadPool.Names.SAME; + } + + @Override + public void handleResponse(Response response) { + finishOnSuccess(response); + } + + @Override + public void handleException(TransportException exp) { + try { + // if we got disconnected from the node, or the node / shard is not in the right state (being closed) + if (exp.unwrapCause() instanceof ConnectTransportException || exp.unwrapCause() instanceof NodeClosedException || + (isPrimaryAction && retryPrimaryException(exp.unwrapCause()))) { + logger.trace("received an error from node [{}] for request [{}], scheduling a retry", exp, node.id(), request); + retry(exp); + } else { + finishAsFailed(exp); + } + } catch (Throwable t) { + finishWithUnexpectedFailure(t); + } + } + }); + } + void retry(Throwable failure) { assert failure != null; if (observer.isTimedOut()) { @@ -518,22 +518,9 @@ public abstract class TransportReplicationAction + * Note that as soon as we move to replication action, state responsibility is transferred to {@link ReplicationPhase}. 
+ */ + final class PrimaryPhase extends AbstractRunnable { + private final Request request; + private final TransportChannel channel; + private final ClusterState state; + private final AtomicBoolean finished = new AtomicBoolean(); + private Releasable indexShardReference; + + PrimaryPhase(Request request, TransportChannel channel) { + this.state = clusterService.state(); + this.request = request; + this.channel = channel; + } + + @Override + public void onFailure(Throwable e) { + finishAsFailed(e); + } + + @Override + protected void doRun() throws Exception { + // request shardID was set in ReroutePhase + assert request.shardId() != null : "request shardID must be set prior to primary phase"; + final ShardId shardId = request.shardId(); + final String writeConsistencyFailure = checkWriteConsistency(shardId); if (writeConsistencyFailure != null) { - retryBecauseUnavailable(primary.shardId(), writeConsistencyFailure); + finishBecauseUnavailable(shardId, writeConsistencyFailure); return; } final ReplicationPhase replicationPhase; try { - indexShardReference = getIndexShardOperationsCounter(primary.shardId()); - PrimaryOperationRequest por = new PrimaryOperationRequest(primary.id(), internalRequest.concreteIndex(), internalRequest.request()); - Tuple primaryResponse = shardOperationOnPrimary(observer.observedState(), por); + indexShardReference = getIndexShardOperationsCounter(shardId); + Tuple primaryResponse = shardOperationOnPrimary(state.metaData(), request); if (logger.isTraceEnabled()) { - logger.trace("operation completed on primary [{}], action [{}], request [{}], cluster state version [{}]", primary, actionName, por.request, observer.observedState().version()); + logger.trace("action [{}] completed on shard [{}] for request [{}] with cluster state version [{}]", transportPrimaryAction, shardId, request, state.version()); } - replicationPhase = new ReplicationPhase(shardsIt, primaryResponse.v2(), primaryResponse.v1(), observer, primary, internalRequest, 
listener, indexShardReference, shardFailedTimeout); + replicationPhase = new ReplicationPhase(primaryResponse.v2(), primaryResponse.v1(), shardId, channel, indexShardReference, shardFailedTimeout); } catch (Throwable e) { - // shard has not been allocated yet, retry it here - if (retryPrimaryException(e)) { - logger.trace("had an error while performing operation on primary ({}, action [{}], request [{}]), scheduling a retry.", e, primary, actionName, internalRequest.request); - // We have to close here because when we retry we will increment get a new reference on index shard again and we do not want to - // increment twice. - Releasables.close(indexShardReference); - // We have to reset to null here because whe we retry it might be that we never get to the point where we assign a new reference - // (for example, in case the operation was rejected because queue is full). In this case we would release again once one of the finish methods is called. - indexShardReference = null; - retry(e); - return; - } if (ExceptionsHelper.status(e) == RestStatus.CONFLICT) { if (logger.isTraceEnabled()) { - logger.trace(primary.shortSummary() + ": Failed to execute [" + internalRequest.request() + "]", e); + logger.trace("failed to execute [{}] on [{}]", e, request, shardId); } } else { if (logger.isDebugEnabled()) { - logger.debug(primary.shortSummary() + ": Failed to execute [" + internalRequest.request() + "]", e); + logger.debug("failed to execute [{}] on [{}]", e, request, shardId); } } finishAsFailed(e); @@ -611,22 +613,22 @@ public abstract class TransportReplicationAction 2) { @@ -648,17 +650,44 @@ public abstract class TransportReplicationAction listener; - private final AtomicBoolean finished = new AtomicBoolean(false); + private final TransportChannel channel; + private final ShardId shardId; + private final List shards; + private final DiscoveryNodes nodes; + private final boolean executeOnReplica; + private final String indexUUID; + private final AtomicBoolean finished 
= new AtomicBoolean(); private final AtomicInteger success = new AtomicInteger(1); // We already wrote into the primary shard private final ConcurrentMap shardReplicaFailures = ConcurrentCollections.newConcurrentMap(); - private final IndexMetaData indexMetaData; - private final ShardRouting originalPrimaryShard; private final AtomicInteger pending; private final int totalShards; - private final ClusterStateObserver observer; private final Releasable indexShardReference; private final TimeValue shardFailedTimeout; - /** - * the constructor doesn't take any action, just calculates state. Call {@link #run()} to start - * replicating. - */ - public ReplicationPhase(ShardIterator originalShardIt, ReplicaRequest replicaRequest, Response finalResponse, - ClusterStateObserver observer, ShardRouting originalPrimaryShard, - InternalRequest internalRequest, ActionListener listener, Releasable indexShardReference, - TimeValue shardFailedTimeout) { + public ReplicationPhase(ReplicaRequest replicaRequest, Response finalResponse, ShardId shardId, + TransportChannel channel, Releasable indexShardReference, TimeValue shardFailedTimeout) { this.replicaRequest = replicaRequest; - this.listener = listener; + this.channel = channel; this.finalResponse = finalResponse; - this.originalPrimaryShard = originalPrimaryShard; - this.observer = observer; - indexMetaData = observer.observedState().metaData().index(internalRequest.concreteIndex()); this.indexShardReference = indexShardReference; this.shardFailedTimeout = shardFailedTimeout; + this.shardId = shardId; - ShardRouting shard; - // we double check on the state, if it got changed we need to make sure we take the latest one cause - // maybe a replica shard started its recovery process and we need to apply it there... + // we have to get a new state after successfully indexing into the primary in order to honour recovery semantics. 
+ // we have to make sure that every operation indexed into the primary after recovery start will also be replicated + // to the recovery target. If we use an old cluster state, we may miss a relocation that has started since then. + // If the index gets deleted after primary operation, we skip replication + final ClusterState state = clusterService.state(); + final IndexRoutingTable index = state.getRoutingTable().index(shardId.getIndex()); + final IndexShardRoutingTable shardRoutingTable = (index != null) ? index.shard(shardId.id()) : null; + final IndexMetaData indexMetaData = state.getMetaData().index(shardId.getIndex()); + this.shards = (shardRoutingTable != null) ? shardRoutingTable.shards() : Collections.emptyList(); + this.executeOnReplica = (indexMetaData == null) || shouldExecuteReplication(indexMetaData.getSettings()); + this.indexUUID = (indexMetaData != null) ? indexMetaData.getIndexUUID() : null; + this.nodes = state.getNodes(); - // we also need to make sure if the new state has a new primary shard (that we indexed to before) started - // and assigned to another node (while the indexing happened). In that case, we want to apply it on the - // new primary shard as well... 
- ClusterState newState = clusterService.state(); + if (shards.isEmpty()) { + logger.debug("replication phase for request [{}] on [{}] is skipped due to index deletion after primary operation", replicaRequest, shardId); + } - int numberOfUnassignedOrIgnoredReplicas = 0; + // we calculate number of target nodes to send replication operations, including nodes with relocating shards + int numberOfIgnoredShardInstances = 0; int numberOfPendingShardInstances = 0; - if (observer.observedState() != newState) { - observer.reset(newState); - shardIt = shards(newState, internalRequest); - while ((shard = shardIt.nextOrNull()) != null) { - if (shard.primary()) { - if (originalPrimaryShard.currentNodeId().equals(shard.currentNodeId()) == false) { - // there is a new primary, we'll have to replicate to it. - numberOfPendingShardInstances++; - } - if (shard.relocating()) { - numberOfPendingShardInstances++; - } - } else if (shouldExecuteReplication(indexMetaData.getSettings()) == false) { - // If the replicas use shadow replicas, there is no reason to - // perform the action on the replica, so skip it and - // immediately return - - // this delays mapping updates on replicas because they have - // to wait until they get the new mapping through the cluster - // state, which is why we recommend pre-defined mappings for - // indices using shadow replicas - numberOfUnassignedOrIgnoredReplicas++; - } else if (shard.unassigned()) { - numberOfUnassignedOrIgnoredReplicas++; - } else if (shard.relocating()) { - // we need to send to two copies - numberOfPendingShardInstances += 2; - } else { + for (ShardRouting shard : shards) { + if (shard.primary() == false && executeOnReplica == false) { + numberOfIgnoredShardInstances++; + } else if (shard.unassigned()) { + numberOfIgnoredShardInstances++; + } else { + if (shard.currentNodeId().equals(nodes.localNodeId()) == false) { numberOfPendingShardInstances++; } - } - } else { - shardIt = originalShardIt; - shardIt.reset(); - while ((shard = 
shardIt.nextOrNull()) != null) { - if (shard.unassigned()) { - numberOfUnassignedOrIgnoredReplicas++; - } else if (shard.primary()) { - if (shard.relocating()) { - // we have to replicate to the other copy - numberOfPendingShardInstances += 1; - } - } else if (shouldExecuteReplication(indexMetaData.getSettings()) == false) { - // If the replicas use shadow replicas, there is no reason to - // perform the action on the replica, so skip it and - // immediately return - - // this delays mapping updates on replicas because they have - // to wait until they get the new mapping through the cluster - // state, which is why we recommend pre-defined mappings for - // indices using shadow replicas - numberOfUnassignedOrIgnoredReplicas++; - } else if (shard.relocating()) { - // we need to send to two copies - numberOfPendingShardInstances += 2; - } else { + if (shard.relocating()) { numberOfPendingShardInstances++; } } } - - // one for the primary already done - this.totalShards = 1 + numberOfPendingShardInstances + numberOfUnassignedOrIgnoredReplicas; + // one for the local primary copy + this.totalShards = 1 + numberOfPendingShardInstances + numberOfIgnoredShardInstances; this.pending = new AtomicInteger(numberOfPendingShardInstances); + if (logger.isTraceEnabled()) { + logger.trace("replication phase started. 
pending [{}], action [{}], request [{}], cluster state version used [{}]", pending.get(), + transportReplicaAction, replicaRequest, state.version()); + } } /** @@ -821,114 +793,84 @@ public abstract class TransportReplicationAction effectiveRouting = routing.get(index); if (effectiveRouting != null) { for (String r : effectiveRouting) { - int shardId = shardId(clusterState, index, null, null, r); + int shardId = generateShardId(clusterState, index, null, r); IndexShardRoutingTable indexShard = indexRouting.shard(shardId); if (indexShard == null) { throw new ShardNotFoundException(new ShardId(index, shardId)); @@ -200,14 +196,6 @@ public class OperationRouting extends AbstractComponent { } } - public IndexMetaData indexMetaData(ClusterState clusterState, String index) { - IndexMetaData indexMetaData = clusterState.metaData().index(index); - if (indexMetaData == null) { - throw new IndexNotFoundException(index); - } - return indexMetaData; - } - protected IndexRoutingTable indexRoutingTable(ClusterState clusterState, String index) { IndexRoutingTable indexRouting = clusterState.routingTable().index(index); if (indexRouting == null) { @@ -216,25 +204,20 @@ public class OperationRouting extends AbstractComponent { return indexRouting; } - - // either routing is set, or type/id are set - - protected IndexShardRoutingTable shards(ClusterState clusterState, String index, String type, String id, String routing) { - int shardId = shardId(clusterState, index, type, id, routing); - return shards(clusterState, index, shardId); + protected IndexShardRoutingTable shards(ClusterState clusterState, String index, String id, String routing) { + int shardId = generateShardId(clusterState, index, id, routing); + return clusterState.getRoutingTable().shardRoutingTable(index, shardId); } - protected IndexShardRoutingTable shards(ClusterState clusterState, String index, int shardId) { - IndexShardRoutingTable indexShard = indexRoutingTable(clusterState, index).shard(shardId); - if 
(indexShard == null) { - throw new ShardNotFoundException(new ShardId(index, shardId)); + public ShardId shardId(ClusterState clusterState, String index, String id, @Nullable String routing) { + return new ShardId(index, generateShardId(clusterState, index, id, routing)); + } + + private int generateShardId(ClusterState clusterState, String index, String id, @Nullable String routing) { + IndexMetaData indexMetaData = clusterState.metaData().index(index); + if (indexMetaData == null) { + throw new IndexNotFoundException(index); } - return indexShard; - } - - @SuppressForbidden(reason = "Math#abs is trappy") - private int shardId(ClusterState clusterState, String index, String type, String id, @Nullable String routing) { - final IndexMetaData indexMetaData = indexMetaData(clusterState, index); final int hash; if (routing == null) { hash = Murmur3HashFunction.hash(id); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java index c210539bc58..fbabacd79fd 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java @@ -33,6 +33,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardNotFoundException; import java.io.IOException; import java.util.ArrayList; @@ -95,6 +97,24 @@ public class RoutingTable implements Iterable, Diffable.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, listener); - assertFalse("primary phase should stop execution", primaryPhase.checkBlocks()); + TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(request, listener); + reroutePhase.run(); 
assertListenerThrows("primary phase should fail operation", listener, ClusterBlockException.class); block = ClusterBlocks.builder() .addGlobalBlock(new ClusterBlock(1, "retryable", true, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL)); clusterService.setState(ClusterState.builder(clusterService.state()).blocks(block)); listener = new PlainActionFuture<>(); - primaryPhase = action.new PrimaryPhase(new Request().timeout("5ms"), listener); - assertFalse("primary phase should stop execution on retryable block", primaryPhase.checkBlocks()); + reroutePhase = action.new ReroutePhase(new Request().timeout("5ms"), listener); + reroutePhase.run(); assertListenerThrows("failed to timeout on retryable block", listener, ClusterBlockException.class); listener = new PlainActionFuture<>(); - primaryPhase = action.new PrimaryPhase(new Request(), listener); - assertFalse("primary phase should stop execution on retryable block", primaryPhase.checkBlocks()); + reroutePhase = action.new ReroutePhase(new Request(), listener); + reroutePhase.run(); assertFalse("primary phase should wait on retryable block", listener.isDone()); block = ClusterBlocks.builder() @@ -172,25 +170,47 @@ public class TransportReplicationActionTests extends ESTestCase { Request request = new Request(shardId).timeout("1ms"); PlainActionFuture listener = new PlainActionFuture<>(); - TransportReplicationAction.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, listener); - primaryPhase.run(); + TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(request, listener); + reroutePhase.run(); assertListenerThrows("unassigned primary didn't cause a timeout", listener, UnavailableShardsException.class); request = new Request(shardId); listener = new PlainActionFuture<>(); - primaryPhase = action.new PrimaryPhase(request, listener); - primaryPhase.run(); + reroutePhase = action.new ReroutePhase(request, listener); + reroutePhase.run(); assertFalse("unassigned primary didn't 
cause a retry", listener.isDone()); clusterService.setState(state(index, true, ShardRoutingState.STARTED)); logger.debug("--> primary assigned state:\n{}", clusterService.state().prettyPrint()); - listener.get(); - assertTrue("request wasn't processed on primary, despite of it being assigned", request.processedOnPrimary.get()); + final IndexShardRoutingTable shardRoutingTable = clusterService.state().routingTable().index(index).shard(shardId.id()); + final String primaryNodeId = shardRoutingTable.primaryShard().currentNodeId(); + final List capturedRequests = transport.capturedRequestsByTargetNode().get(primaryNodeId); + assertThat(capturedRequests, notNullValue()); + assertThat(capturedRequests.size(), equalTo(1)); + assertThat(capturedRequests.get(0).action, equalTo("testAction[p]")); assertIndexShardCounter(1); } - public void testRoutingToPrimary() { + public void testUnknownIndexOrShardOnReroute() throws InterruptedException { + final String index = "test"; + // no replicas in oder to skip the replication part + clusterService.setState(state(index, true, + randomBoolean() ? 
ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED)); + logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); + Request request = new Request(new ShardId("unknown_index", 0)).timeout("1ms"); + PlainActionFuture listener = new PlainActionFuture<>(); + TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(request, listener); + reroutePhase.run(); + assertListenerThrows("must throw index not found exception", listener, IndexNotFoundException.class); + request = new Request(new ShardId(index, 10)).timeout("1ms"); + listener = new PlainActionFuture<>(); + reroutePhase = action.new ReroutePhase(request, listener); + reroutePhase.run(); + assertListenerThrows("must throw shard not found exception", listener, ShardNotFoundException.class); + } + + public void testRoutePhaseExecutesRequest() { final String index = "test"; final ShardId shardId = new ShardId(index, 0); @@ -203,25 +223,126 @@ public class TransportReplicationActionTests extends ESTestCase { Request request = new Request(shardId); PlainActionFuture listener = new PlainActionFuture<>(); - TransportReplicationAction.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, listener); - assertTrue(primaryPhase.checkBlocks()); - primaryPhase.routeRequestOrPerformLocally(shardRoutingTable.primaryShard(), shardRoutingTable.shardsIt()); - if (primaryNodeId.equals(clusterService.localNode().id())) { - logger.info("--> primary is assigned locally, testing for execution"); - assertTrue("request failed to be processed on a local primary", request.processedOnPrimary.get()); - if (transport.capturedRequests().length > 0) { - assertIndexShardCounter(2); - } else { - assertIndexShardCounter(1); - } + TransportReplicationAction.ReroutePhase reroutePhase = action.new ReroutePhase(request, listener); + reroutePhase.run(); + assertThat(request.shardId(), equalTo(shardId)); + logger.info("--> primary is assigned to [{}], checking request forwarded", 
primaryNodeId); + final List capturedRequests = transport.capturedRequestsByTargetNode().get(primaryNodeId); + assertThat(capturedRequests, notNullValue()); + assertThat(capturedRequests.size(), equalTo(1)); + if (clusterService.state().nodes().localNodeId().equals(primaryNodeId)) { + assertThat(capturedRequests.get(0).action, equalTo("testAction[p]")); } else { - logger.info("--> primary is assigned to [{}], checking request forwarded", primaryNodeId); - final List capturedRequests = transport.capturedRequestsByTargetNode().get(primaryNodeId); - assertThat(capturedRequests, notNullValue()); - assertThat(capturedRequests.size(), equalTo(1)); assertThat(capturedRequests.get(0).action, equalTo("testAction")); - assertIndexShardUninitialized(); } + assertIndexShardUninitialized(); + } + + public void testPrimaryPhaseExecutesRequest() throws InterruptedException, ExecutionException { + final String index = "test"; + final ShardId shardId = new ShardId(index, 0); + clusterService.setState(state(index, true, ShardRoutingState.STARTED, ShardRoutingState.STARTED)); + Request request = new Request(shardId).timeout("1ms"); + PlainActionFuture listener = new PlainActionFuture<>(); + TransportReplicationAction.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, createTransportChannel(listener)); + primaryPhase.run(); + assertThat("request was not processed on primary", request.processedOnPrimary.get(), equalTo(true)); + final String replicaNodeId = clusterService.state().getRoutingTable().shardRoutingTable(index, shardId.id()).replicaShards().get(0).currentNodeId(); + final List requests = transport.capturedRequestsByTargetNode().get(replicaNodeId); + assertThat(requests, notNullValue()); + assertThat(requests.size(), equalTo(1)); + assertThat("replica request was not sent", requests.get(0).action, equalTo("testAction[r]")); + } + + public void testAddedReplicaAfterPrimaryOperation() { + final String index = "test"; + final ShardId shardId = new ShardId(index, 0); + 
// start with no replicas + clusterService.setState(stateWithStartedPrimary(index, true, 0)); + logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); + final ClusterState stateWithAddedReplicas = state(index, true, ShardRoutingState.STARTED, randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.STARTED); + + final Action actionWithAddedReplicaAfterPrimaryOp = new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) { + @Override + protected Tuple shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Throwable { + final Tuple operationOnPrimary = super.shardOperationOnPrimary(metaData, shardRequest); + // add replicas after primary operation + ((TestClusterService) clusterService).setState(stateWithAddedReplicas); + logger.debug("--> state after primary operation:\n{}", clusterService.state().prettyPrint()); + return operationOnPrimary; + } + }; + + Request request = new Request(shardId); + PlainActionFuture listener = new PlainActionFuture<>(); + TransportReplicationAction.PrimaryPhase primaryPhase = actionWithAddedReplicaAfterPrimaryOp.new PrimaryPhase(request, createTransportChannel(listener)); + primaryPhase.run(); + assertThat("request was not processed on primary", request.processedOnPrimary.get(), equalTo(true)); + for (ShardRouting replica : stateWithAddedReplicas.getRoutingTable().shardRoutingTable(index, shardId.id()).replicaShards()) { + List requests = transport.capturedRequestsByTargetNode().get(replica.currentNodeId()); + assertThat(requests, notNullValue()); + assertThat(requests.size(), equalTo(1)); + assertThat("replica request was not sent", requests.get(0).action, equalTo("testAction[r]")); + } + } + + public void testRelocatingReplicaAfterPrimaryOperation() { + final String index = "test"; + final ShardId shardId = new ShardId(index, 0); + // start with a replica + clusterService.setState(state(index, true, ShardRoutingState.STARTED, randomBoolean() ? 
ShardRoutingState.INITIALIZING : ShardRoutingState.STARTED)); + logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); + final ClusterState stateWithRelocatingReplica = state(index, true, ShardRoutingState.STARTED, ShardRoutingState.RELOCATING); + + final Action actionWithRelocatingReplicasAfterPrimaryOp = new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) { + @Override + protected Tuple shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Throwable { + final Tuple operationOnPrimary = super.shardOperationOnPrimary(metaData, shardRequest); + // set replica to relocating + ((TestClusterService) clusterService).setState(stateWithRelocatingReplica); + logger.debug("--> state after primary operation:\n{}", clusterService.state().prettyPrint()); + return operationOnPrimary; + } + }; + + Request request = new Request(shardId); + PlainActionFuture listener = new PlainActionFuture<>(); + TransportReplicationAction.PrimaryPhase primaryPhase = actionWithRelocatingReplicasAfterPrimaryOp.new PrimaryPhase(request, createTransportChannel(listener)); + primaryPhase.run(); + assertThat("request was not processed on primary", request.processedOnPrimary.get(), equalTo(true)); + ShardRouting relocatingReplicaShard = stateWithRelocatingReplica.getRoutingTable().shardRoutingTable(index, shardId.id()).replicaShards().get(0); + for (String node : new String[] {relocatingReplicaShard.currentNodeId(), relocatingReplicaShard.relocatingNodeId()}) { + List requests = transport.capturedRequestsByTargetNode().get(node); + assertThat(requests, notNullValue()); + assertThat(requests.size(), equalTo(1)); + assertThat("replica request was not sent to replica", requests.get(0).action, equalTo("testAction[r]")); + } + } + + public void testIndexDeletedAfterPrimaryOperation() { + final String index = "test"; + final ShardId shardId = new ShardId(index, 0); + clusterService.setState(state(index, true, 
ShardRoutingState.STARTED, ShardRoutingState.STARTED)); + logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); + final ClusterState stateWithDeletedIndex = state(index + "_new", true, ShardRoutingState.STARTED, ShardRoutingState.RELOCATING); + + final Action actionWithDeletedIndexAfterPrimaryOp = new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) { + @Override + protected Tuple shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Throwable { + final Tuple operationOnPrimary = super.shardOperationOnPrimary(metaData, shardRequest); + // delete index after primary op + ((TestClusterService) clusterService).setState(stateWithDeletedIndex); + logger.debug("--> state after primary operation:\n{}", clusterService.state().prettyPrint()); + return operationOnPrimary; + } + }; + + Request request = new Request(shardId); + PlainActionFuture listener = new PlainActionFuture<>(); + TransportReplicationAction.PrimaryPhase primaryPhase = actionWithDeletedIndexAfterPrimaryOp.new PrimaryPhase(request, createTransportChannel(listener)); + primaryPhase.run(); + assertThat("request was not processed on primary", request.processedOnPrimary.get(), equalTo(true)); + assertThat("replication phase should be skipped if index gets deleted after primary operation", transport.capturedRequestsByTargetNode().size(), equalTo(0)); } public void testWriteConsistency() throws ExecutionException, InterruptedException { @@ -266,10 +387,9 @@ public class TransportReplicationActionTests extends ESTestCase { final IndexShardRoutingTable shardRoutingTable = clusterService.state().routingTable().index(index).shard(shardId.id()); PlainActionFuture listener = new PlainActionFuture<>(); - - TransportReplicationAction.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, listener); + TransportReplicationAction.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, createTransportChannel(listener)); if 
(passesWriteConsistency) { - assertThat(primaryPhase.checkWriteConsistency(shardRoutingTable.primaryShard()), nullValue()); + assertThat(primaryPhase.checkWriteConsistency(shardRoutingTable.primaryShard().shardId()), nullValue()); primaryPhase.run(); assertTrue("operations should have been perform, consistency level is met", request.processedOnPrimary.get()); if (assignedReplicas > 0) { @@ -278,14 +398,18 @@ public class TransportReplicationActionTests extends ESTestCase { assertIndexShardCounter(1); } } else { - assertThat(primaryPhase.checkWriteConsistency(shardRoutingTable.primaryShard()), notNullValue()); + assertThat(primaryPhase.checkWriteConsistency(shardRoutingTable.primaryShard().shardId()), notNullValue()); primaryPhase.run(); assertFalse("operations should not have been perform, consistency level is *NOT* met", request.processedOnPrimary.get()); + assertListenerThrows("should throw exception to trigger retry", listener, UnavailableShardsException.class); assertIndexShardUninitialized(); for (int i = 0; i < replicaStates.length; i++) { replicaStates[i] = ShardRoutingState.STARTED; } clusterService.setState(state(index, true, ShardRoutingState.STARTED, replicaStates)); + listener = new PlainActionFuture<>(); + primaryPhase = action.new PrimaryPhase(request, createTransportChannel(listener)); + primaryPhase.run(); assertTrue("once the consistency level met, operation should continue", request.processedOnPrimary.get()); assertIndexShardCounter(2); } @@ -340,23 +464,19 @@ public class TransportReplicationActionTests extends ESTestCase { protected void runReplicateTest(IndexShardRoutingTable shardRoutingTable, int assignedReplicas, int totalShards) throws InterruptedException, ExecutionException { - final ShardRouting primaryShard = shardRoutingTable.primaryShard(); final ShardIterator shardIt = shardRoutingTable.shardsIt(); final ShardId shardId = shardIt.shardId(); - final Request request = new Request(); - PlainActionFuture listener = new 
PlainActionFuture<>(); - + final Request request = new Request(shardId); + final PlainActionFuture listener = new PlainActionFuture<>(); logger.debug("expecting [{}] assigned replicas, [{}] total shards. using state: \n{}", assignedReplicas, totalShards, clusterService.state().prettyPrint()); - final TransportReplicationAction.InternalRequest internalRequest = action.new InternalRequest(request); - internalRequest.concreteIndex(shardId.index().name()); Releasable reference = getOrCreateIndexShardOperationsCounter(); assertIndexShardCounter(2); // TODO: set a default timeout TransportReplicationAction.ReplicationPhase replicationPhase = - action.new ReplicationPhase(shardIt, request, - new Response(), new ClusterStateObserver(clusterService, logger), - primaryShard, internalRequest, listener, reference, null); + action.new ReplicationPhase(request, + new Response(), + request.shardId(), createTransportChannel(listener), reference, null); assertThat(replicationPhase.totalShards(), equalTo(totalShards)); assertThat(replicationPhase.pending(), equalTo(assignedReplicas)); @@ -433,7 +553,7 @@ public class TransportReplicationActionTests extends ESTestCase { * However, this failure would only become apparent once listener.get is called. Seems a little implicit. 
* */ action = new ActionWithDelay(Settings.EMPTY, "testActionWithExceptions", transportService, clusterService, threadPool); - final TransportReplicationAction.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, listener); + final TransportReplicationAction.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, createTransportChannel(listener)); Thread t = new Thread() { @Override public void run() { @@ -464,7 +584,7 @@ public class TransportReplicationActionTests extends ESTestCase { logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); Request request = new Request(shardId).timeout("100ms"); PlainActionFuture listener = new PlainActionFuture<>(); - TransportReplicationAction.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, listener); + TransportReplicationAction.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, createTransportChannel(listener)); primaryPhase.run(); assertIndexShardCounter(2); assertThat(transport.capturedRequests().length, equalTo(1)); @@ -473,7 +593,7 @@ public class TransportReplicationActionTests extends ESTestCase { assertIndexShardCounter(1); transport.clear(); request = new Request(shardId).timeout("100ms"); - primaryPhase = action.new PrimaryPhase(request, listener); + primaryPhase = action.new PrimaryPhase(request, createTransportChannel(listener)); primaryPhase.run(); assertIndexShardCounter(2); CapturingTransport.CapturedRequest[] replicationRequests = transport.capturedRequests(); @@ -498,7 +618,7 @@ public class TransportReplicationActionTests extends ESTestCase { @Override public void run() { try { - replicaOperationTransportHandler.messageReceived(new Request(), createTransportChannel()); + replicaOperationTransportHandler.messageReceived(new Request(), createTransportChannel(new PlainActionFuture<>())); } catch (Exception e) { } } @@ -515,7 +635,7 @@ public class TransportReplicationActionTests extends ESTestCase { action = new ActionWithExceptions(Settings.EMPTY, 
"testActionWithExceptions", transportService, clusterService, threadPool); final Action.ReplicaOperationTransportHandler replicaOperationTransportHandlerForException = action.new ReplicaOperationTransportHandler(); try { - replicaOperationTransportHandlerForException.messageReceived(new Request(shardId), createTransportChannel()); + replicaOperationTransportHandlerForException.messageReceived(new Request(shardId), createTransportChannel(new PlainActionFuture<>())); fail(); } catch (Throwable t2) { } @@ -531,7 +651,7 @@ public class TransportReplicationActionTests extends ESTestCase { logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); Request request = new Request(shardId).timeout("100ms"); PlainActionFuture listener = new PlainActionFuture<>(); - TransportReplicationAction.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, listener); + TransportReplicationAction.PrimaryPhase primaryPhase = action.new PrimaryPhase(request, createTransportChannel(listener)); primaryPhase.run(); // no replica request should have been sent yet assertThat(transport.capturedRequests().length, equalTo(0)); @@ -559,7 +679,6 @@ public class TransportReplicationActionTests extends ESTestCase { } public static class Request extends ReplicationRequest { - int shardId; public AtomicBoolean processedOnPrimary = new AtomicBoolean(); public AtomicInteger processedOnReplicas = new AtomicInteger(); @@ -568,21 +687,19 @@ public class TransportReplicationActionTests extends ESTestCase { Request(ShardId shardId) { this(); - this.shardId = shardId.id(); - this.index(shardId.index().name()); + this.shardId = shardId; + this.index = shardId.getIndex(); // keep things simple } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeVInt(shardId); } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - shardId = in.readVInt(); } } @@ -605,22 +722,17 @@ public class 
TransportReplicationActionTests extends ESTestCase { } @Override - protected Tuple shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) throws Throwable { - boolean executedBefore = shardRequest.request.processedOnPrimary.getAndSet(true); + protected Tuple shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Throwable { + boolean executedBefore = shardRequest.processedOnPrimary.getAndSet(true); assert executedBefore == false : "request has already been executed on the primary"; - return new Tuple<>(new Response(), shardRequest.request); + return new Tuple<>(new Response(), shardRequest); } @Override - protected void shardOperationOnReplica(ShardId shardId, Request request) { + protected void shardOperationOnReplica(Request request) { request.processedOnReplicas.incrementAndGet(); } - @Override - protected ShardIterator shards(ClusterState clusterState, InternalRequest request) { - return clusterState.getRoutingTable().index(request.concreteIndex()).shard(request.request().shardId).shardsIt(); - } - @Override protected boolean checkWriteConsistency() { return false; @@ -659,8 +771,8 @@ public class TransportReplicationActionTests extends ESTestCase { } @Override - protected Tuple shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) throws Throwable { - return throwException(shardRequest.shardId); + protected Tuple shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Throwable { + return throwException(shardRequest.shardId()); } private Tuple throwException(ShardId shardId) { @@ -681,8 +793,8 @@ public class TransportReplicationActionTests extends ESTestCase { } @Override - protected void shardOperationOnReplica(ShardId shardId, Request shardRequest) { - throwException(shardRequest.internalShardId); + protected void shardOperationOnReplica(Request shardRequest) { + throwException(shardRequest.shardId()); } } @@ -697,9 +809,9 @@ public class 
TransportReplicationActionTests extends ESTestCase { } @Override - protected Tuple shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) throws Throwable { + protected Tuple shardOperationOnPrimary(MetaData metaData, Request shardRequest) throws Throwable { awaitLatch(); - return new Tuple<>(new Response(), shardRequest.request); + return new Tuple<>(new Response(), shardRequest); } private void awaitLatch() throws InterruptedException { @@ -708,7 +820,7 @@ public class TransportReplicationActionTests extends ESTestCase { } @Override - protected void shardOperationOnReplica(ShardId shardId, Request shardRequest) { + protected void shardOperationOnReplica(Request shardRequest) { try { awaitLatch(); } catch (InterruptedException e) { @@ -720,7 +832,7 @@ public class TransportReplicationActionTests extends ESTestCase { /* * Transport channel that is needed for replica operation testing. * */ - public TransportChannel createTransportChannel() { + public TransportChannel createTransportChannel(final PlainActionFuture listener) { return new TransportChannel() { @Override @@ -735,14 +847,17 @@ public class TransportReplicationActionTests extends ESTestCase { @Override public void sendResponse(TransportResponse response) throws IOException { + listener.onResponse(((Response) response)); } @Override public void sendResponse(TransportResponse response, TransportResponseOptions options) throws IOException { + listener.onResponse(((Response) response)); } @Override public void sendResponse(Throwable error) throws IOException { + listener.onFailure(error); } }; } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingBackwardCompatibilityTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingBackwardCompatibilityTests.java index 29281e256f6..e8be4e34ae0 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingBackwardCompatibilityTests.java +++ 
b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingBackwardCompatibilityTests.java @@ -26,13 +26,11 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.Node; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; import java.io.BufferedReader; import java.io.InputStreamReader; -import java.nio.file.Path; import java.util.Arrays; public class RoutingBackwardCompatibilityTests extends ESTestCase { diff --git a/core/src/test/java/org/elasticsearch/consistencylevel/WriteConsistencyLevelIT.java b/core/src/test/java/org/elasticsearch/consistencylevel/WriteConsistencyLevelIT.java index f124e19c27a..b696c445f30 100644 --- a/core/src/test/java/org/elasticsearch/consistencylevel/WriteConsistencyLevelIT.java +++ b/core/src/test/java/org/elasticsearch/consistencylevel/WriteConsistencyLevelIT.java @@ -53,7 +53,7 @@ public class WriteConsistencyLevelIT extends ESIntegTestCase { fail("can't index, does not match consistency"); } catch (UnavailableShardsException e) { assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE)); - assertThat(e.getMessage(), equalTo("[test][0] Not enough active copies to meet write consistency of [QUORUM] (have 1, needed 2). Timeout: [100ms], request: index {[test][type1][1], source[{ type1 : { \"id\" : \"1\", \"name\" : \"test\" } }]}")); + assertThat(e.getMessage(), equalTo("[test][0] Not enough active copies to meet write consistency of [QUORUM] (have 1, needed 2). 
Timeout: [100ms], request: [index {[test][type1][1], source[{ type1 : { \"id\" : \"1\", \"name\" : \"test\" } }]}]")); // but really, all is well } @@ -76,7 +76,7 @@ public class WriteConsistencyLevelIT extends ESIntegTestCase { fail("can't index, does not match consistency"); } catch (UnavailableShardsException e) { assertThat(e.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE)); - assertThat(e.getMessage(), equalTo("[test][0] Not enough active copies to meet write consistency of [ALL] (have 2, needed 3). Timeout: [100ms], request: index {[test][type1][1], source[{ type1 : { \"id\" : \"1\", \"name\" : \"test\" } }]}")); + assertThat(e.getMessage(), equalTo("[test][0] Not enough active copies to meet write consistency of [ALL] (have 2, needed 3). Timeout: [100ms], request: [index {[test][type1][1], source[{ type1 : { \"id\" : \"1\", \"name\" : \"test\" } }]}]")); // but really, all is well } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 2b200524b8e..f4a7507a0b8 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.common.compress.CompressedXContent; @@ -117,8 +118,9 @@ public class MapperServiceTests extends ESSingleNodeTestCase { if (t instanceof ExecutionException) { t = ((ExecutionException) t).getCause(); } - if (t instanceof IllegalArgumentException) { - assertEquals("It is forbidden to index into the default mapping [_default_]", t.getMessage()); + final Throwable throwable = ExceptionsHelper.unwrapCause(t); + if (throwable instanceof IllegalArgumentException) { + assertEquals("It is forbidden to 
index into the default mapping [_default_]", throwable.getMessage()); } else { throw t; } @@ -133,8 +135,9 @@ public class MapperServiceTests extends ESSingleNodeTestCase { if (t instanceof ExecutionException) { t = ((ExecutionException) t).getCause(); } - if (t instanceof IllegalArgumentException) { - assertEquals("It is forbidden to index into the default mapping [_default_]", t.getMessage()); + final Throwable throwable = ExceptionsHelper.unwrapCause(t); + if (throwable instanceof IllegalArgumentException) { + assertEquals("It is forbidden to index into the default mapping [_default_]", throwable.getMessage()); } else { throw t; } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java index 4291f00bf1a..66a764dd75a 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java @@ -178,7 +178,7 @@ public class IndicesRequestTests extends ESIntegTestCase { } public void testIndex() { - String[] indexShardActions = new String[]{IndexAction.NAME, IndexAction.NAME + "[r]"}; + String[] indexShardActions = new String[]{IndexAction.NAME, IndexAction.NAME + "[p]", IndexAction.NAME + "[r]"}; interceptTransportActions(indexShardActions); IndexRequest indexRequest = new IndexRequest(randomIndexOrAlias(), "type", "id").source("field", "value"); @@ -189,7 +189,7 @@ public class IndicesRequestTests extends ESIntegTestCase { } public void testDelete() { - String[] deleteShardActions = new String[]{DeleteAction.NAME, DeleteAction.NAME + "[r]"}; + String[] deleteShardActions = new String[]{DeleteAction.NAME, DeleteAction.NAME + "[p]", DeleteAction.NAME + "[r]"}; interceptTransportActions(deleteShardActions); DeleteRequest deleteRequest = new DeleteRequest(randomIndexOrAlias(), "type", "id"); @@ -244,7 +244,7 @@ 
public class IndicesRequestTests extends ESIntegTestCase { } public void testBulk() { - String[] bulkShardActions = new String[]{BulkAction.NAME + "[s]", BulkAction.NAME + "[s][r]"}; + String[] bulkShardActions = new String[]{BulkAction.NAME + "[s][p]", BulkAction.NAME + "[s][r]"}; interceptTransportActions(bulkShardActions); List indices = new ArrayList<>(); @@ -344,7 +344,7 @@ public class IndicesRequestTests extends ESIntegTestCase { } public void testFlush() { - String[] indexShardActions = new String[]{TransportShardFlushAction.NAME + "[r]", TransportShardFlushAction.NAME}; + String[] indexShardActions = new String[]{TransportShardFlushAction.NAME, TransportShardFlushAction.NAME + "[r]", TransportShardFlushAction.NAME + "[p]"}; interceptTransportActions(indexShardActions); FlushRequest flushRequest = new FlushRequest(randomIndicesOrAliases()); @@ -367,7 +367,7 @@ public class IndicesRequestTests extends ESIntegTestCase { } public void testRefresh() { - String[] indexShardActions = new String[]{TransportShardRefreshAction.NAME + "[r]", TransportShardRefreshAction.NAME}; + String[] indexShardActions = new String[]{TransportShardRefreshAction.NAME, TransportShardRefreshAction.NAME + "[r]", TransportShardRefreshAction.NAME + "[p]"}; interceptTransportActions(indexShardActions); RefreshRequest refreshRequest = new RefreshRequest(randomIndicesOrAliases()); From 7595c4a3c875b6c643da7f28ca30ef0dad2b6520 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Wed, 9 Dec 2015 13:22:14 -0500 Subject: [PATCH 47/57] Improve network docs This makes some minor improvements (does not fix all problems!) It reorders unicast disco in elasticsearch.yml to be right after the network host, for better locality. It removes the warning (unreleased) about publish addresses, lets try to really discourage setting that unless you need to (behind a proxy server). 
Most people should be fine with `network.host`. Finally it reorganizes the network docs page a bit: We add a table of 4 "basic" settings at the very beginning: * network.host * discovery.zen.ping.unicast.hosts * http.port * transport.tcp.port The first two being the most important, which addresses to bind and talk to, and the other two being the port numbers. The rest of the stuff I tried to simplify and reorder under "advanced" headers. This is just a quick stab, I still think we need more effort into this thing, but we gotta start somewhere. --- .../common/network/NetworkService.java | 8 +- .../main/resources/config/elasticsearch.yml | 20 ++-- docs/reference/modules/network.asciidoc | 107 ++++++++++-------- 3 files changed, 69 insertions(+), 66 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java index 05eaac15f42..835a35d2383 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java @@ -137,8 +137,7 @@ public class NetworkService extends AbstractComponent { * Resolves {@code publishHosts} to a single publish address. The fact that it returns * only one address is just a current limitation. *

- * If {@code publishHosts} resolves to more than one address, then one is selected with magic, - * and the user is warned (they can always just be more specific). + * If {@code publishHosts} resolves to more than one address, then one is selected with magic * @param publishHosts list of hosts to publish as. this may contain special pseudo-hostnames * such as _local_ (see the documentation). if it is null, it will be populated * based on global default settings. @@ -186,13 +185,12 @@ public class NetworkService extends AbstractComponent { } } - // 3. warn user if we end out with multiple publish addresses + // 3. if we end out with multiple publish addresses, select by preference. + // don't warn the user, or they will get confused by bind_host vs publish_host etc. if (addresses.length > 1) { List sorted = new ArrayList<>(Arrays.asList(addresses)); NetworkUtils.sortAddresses(sorted); addresses = new InetAddress[] { sorted.get(0) }; - logger.warn("publish host: {} resolves to multiple addresses, auto-selecting {{}} as single publish address", - Arrays.toString(publishHosts), NetworkAddress.format(addresses[0])); } return addresses[0]; } diff --git a/distribution/src/main/resources/config/elasticsearch.yml b/distribution/src/main/resources/config/elasticsearch.yml index 51630fe0804..4b335ce7a19 100644 --- a/distribution/src/main/resources/config/elasticsearch.yml +++ b/distribution/src/main/resources/config/elasticsearch.yml @@ -60,19 +60,8 @@ # For more information, see the documentation at: # # -# ---------------------------------- Gateway ----------------------------------- -# -# Block initial recovery after a full cluster restart until N nodes are started: -# -# gateway.recover_after_nodes: 3 -# -# For more information, see the documentation at: -# -# # --------------------------------- Discovery ---------------------------------- # -# Elasticsearch nodes will find each other via unicast, by default. 
-# # Pass an initial list of hosts to perform discovery when new node is started: # The default list of hosts is ["127.0.0.1", "[::1]"] # @@ -85,6 +74,15 @@ # For more information, see the documentation at: # # +# ---------------------------------- Gateway ----------------------------------- +# +# Block initial recovery after a full cluster restart until N nodes are started: +# +# gateway.recover_after_nodes: 3 +# +# For more information, see the documentation at: +# +# # ---------------------------------- Various ----------------------------------- # # Disable starting multiple nodes on a single system: diff --git a/docs/reference/modules/network.asciidoc b/docs/reference/modules/network.asciidoc index 4572efe419a..5b990b0f105 100644 --- a/docs/reference/modules/network.asciidoc +++ b/docs/reference/modules/network.asciidoc @@ -1,16 +1,53 @@ [[modules-network]] -== Network Settings +== Basic Settings -There are several modules within a Node that use network based -configuration, for example, the -<> and -<> modules. Node level -network settings allows to set common settings that will be shared among -all network based modules (unless explicitly overridden in each module). +Commonly used network settings: -Be careful with host configuration! Never expose an unprotected instance +[cols="<,<",options="header",] +|======================================================================= +|Name |Description +|`network.host` |Host to bind and publish to other nodes. Can be set to an IP address, hostname, or special value (see table below). Defaults to `_local_`. + +|`discovery.zen.ping.unicast.hosts`|Initial list other nodes. Can be set to IP addresses or hostnames. Defaults to `["127.0.0.1", "[::1]"]`. + +|`http.port` |Port to bind for incoming http requests. Can be set to a single value or a range. Defaults to `9200-9300`. + +|`transport.tcp.port` |Port to bind for communication between nodes. Can be set to a single value or a range. Defaults to `9300-9400`. 
+ +Be careful with network configuration! Never expose an unprotected instance to the public internet. +[cols="<,<",options="header",] +|======================================================================= +|Special Host Value |Description +|`_[networkInterface]_` |Resolves to the addresses of the provided +network interface. For example `_en0_`. + +|`_local_` |Will be resolved to loopback addresses (e.g. 127.0.0.1) + +|`_site_` |Will be resolved to site-local addresses (e.g. 192.168.0.1) + +|`_global_` |Will be resolved to globally-scoped addresses (e.g. 8.8.8.8) + +These special values will work over both IPv4 and IPv6 by default, +but you can also limit this with the use of `:ipv4` of `:ipv6` specifiers, for +example `_en0:ipv4_` would only bind to the ipv4 addresses of interface `en0`. + +|======================================================================= + +When the `discovery-ec2` plugin is installed, you can use +{plugins}/discovery-ec2-discovery.html#discovery-ec2-network-host[ec2 specific host settings]. + +When the `discovery-gce` plugin is installed, you can use +{plugins}/discovery-gce-network-host.html[gce specific host settings]. + +[float] +[[advanced]] +=== Advanced network settings + +`network.bind_host` and `network.publish_host` can be set instead of `network.host` +for advanced cases such as when behind a proxy server. + The `network.bind_host` setting allows to control the host different network components will bind on. By default, the bind host will be `_local_` (loopback addresses such as `127.0.0.1`, `::1`). @@ -21,54 +58,13 @@ Currently an elasticsearch node may be bound to multiple addresses, but only publishes one. If not specified, this defaults to the "best" address from `network.bind_host`, sorted by IPv4/IPv6 stack preference, then by reachability. -The `network.host` setting is a simple setting to automatically set both -`network.bind_host` and `network.publish_host` to the same host value. 
- Both settings allows to be configured with either explicit host address(es) or host name(s). The settings also accept logical setting value(s) explained in the following table: -[cols="<,<",options="header",] -|======================================================================= -|Logical Host Setting Value |Description -|`_local_` |Will be resolved to loopback addresses - -|`_local:ipv4_` |Will be resolved to loopback IPv4 addresses (e.g. 127.0.0.1) - -|`_local:ipv6_` |Will be resolved to loopback IPv6 addresses (e.g. ::1) - -|`_site_` |Will be resolved to site-local addresses ("private network") - -|`_site:ipv4_` |Will be resolved to site-local IPv4 addresses (e.g. 192.168.0.1) - -|`_site:ipv6_` |Will be resolved to site-local IPv6 addresses (e.g. fec0::1) - -|`_global_` |Will be resolved to globally-scoped addresses ("publicly reachable") - -|`_global:ipv4_` |Will be resolved to globally-scoped IPv4 addresses (e.g. 8.8.8.8) - -|`_global:ipv6_` |Will be resolved to globally-scoped IPv6 addresses (e.g. 2001:4860:4860::8888) - -|`_[networkInterface]_` |Resolves to the addresses of the provided -network interface. For example `_en0_`. - -|`_[networkInterface]:ipv4_` |Resolves to the ipv4 addresses of the -provided network interface. For example `_en0:ipv4_`. - -|`_[networkInterface]:ipv6_` |Resolves to the ipv6 addresses of the -provided network interface. For example `_en0:ipv6_`. -|======================================================================= - -When the `discovery-ec2` plugin is installed, you can use -{plugins}/discovery-ec2-discovery.html#discovery-ec2-network-host[ec2 specific host settings]. - -When the `discovery-gce` plugin is installed, you can use -{plugins}/discovery-gce-network-host.html[gce specific host settings]. 
- - [float] [[tcp-settings]] -=== TCP Settings +=== Advanced TCP Settings Any component that uses TCP (like the HTTP, Transport and Memcached) share the following allowed settings: @@ -92,3 +88,14 @@ Defaults to `true` on non-windows machines. size (in size setting format). By default not explicitly set. |======================================================================= +[float] +[[module-settings]] +=== Module-specific Settings + +There are several modules within a Node that use network based +configuration, for example, the +<> and +<> modules. Node level +network settings allows to set common settings that will be shared among +all network based modules (unless explicitly overridden in each module). + From 27c08d452e38c8958222db6a5c1a784e239b3e0f Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Wed, 9 Dec 2015 14:04:44 -0500 Subject: [PATCH 48/57] fix tables --- docs/reference/modules/network.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/modules/network.asciidoc b/docs/reference/modules/network.asciidoc index 5b990b0f105..2c6bc6ea06a 100644 --- a/docs/reference/modules/network.asciidoc +++ b/docs/reference/modules/network.asciidoc @@ -13,6 +13,7 @@ Commonly used network settings: |`http.port` |Port to bind for incoming http requests. Can be set to a single value or a range. Defaults to `9200-9300`. |`transport.tcp.port` |Port to bind for communication between nodes. Can be set to a single value or a range. Defaults to `9300-9400`. +|======================================================================= Be careful with network configuration! Never expose an unprotected instance to the public internet. @@ -28,13 +29,12 @@ network interface. For example `_en0_`. |`_site_` |Will be resolved to site-local addresses (e.g. 192.168.0.1) |`_global_` |Will be resolved to globally-scoped addresses (e.g. 
8.8.8.8) +|======================================================================= These special values will work over both IPv4 and IPv6 by default, but you can also limit this with the use of `:ipv4` of `:ipv6` specifiers, for example `_en0:ipv4_` would only bind to the ipv4 addresses of interface `en0`. -|======================================================================= - When the `discovery-ec2` plugin is installed, you can use {plugins}/discovery-ec2-discovery.html#discovery-ec2-network-host[ec2 specific host settings]. From 610e9b543676ab34222b6f0e6dfcab1999b2b047 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Wed, 9 Dec 2015 14:10:21 -0500 Subject: [PATCH 49/57] add missing header --- docs/reference/modules/network.asciidoc | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/reference/modules/network.asciidoc b/docs/reference/modules/network.asciidoc index 2c6bc6ea06a..9ec867d81e9 100644 --- a/docs/reference/modules/network.asciidoc +++ b/docs/reference/modules/network.asciidoc @@ -18,6 +18,10 @@ Commonly used network settings: Be careful with network configuration! Never expose an unprotected instance to the public internet. +[float] +[[special-values]] +=== Special values for `network.host` + [cols="<,<",options="header",] |======================================================================= |Special Host Value |Description From 3049b14f6bdd79ca7c378d4df84c0781d6bf1662 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Wed, 9 Dec 2015 14:11:32 -0500 Subject: [PATCH 50/57] add missing 'of' --- docs/reference/modules/network.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/modules/network.asciidoc b/docs/reference/modules/network.asciidoc index 9ec867d81e9..6c16d846d95 100644 --- a/docs/reference/modules/network.asciidoc +++ b/docs/reference/modules/network.asciidoc @@ -8,7 +8,7 @@ Commonly used network settings: |Name |Description |`network.host` |Host to bind and publish to other nodes. 
Can be set to an IP address, hostname, or special value (see table below). Defaults to `_local_`. -|`discovery.zen.ping.unicast.hosts`|Initial list other nodes. Can be set to IP addresses or hostnames. Defaults to `["127.0.0.1", "[::1]"]`. +|`discovery.zen.ping.unicast.hosts`|Initial list of other nodes. Can be set to IP addresses or hostnames. Defaults to `["127.0.0.1", "[::1]"]`. |`http.port` |Port to bind for incoming http requests. Can be set to a single value or a range. Defaults to `9200-9300`. From f578254ca746009ac1b0274e213f58b1975d47cb Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Wed, 9 Dec 2015 14:27:49 -0500 Subject: [PATCH 51/57] simplify wording --- docs/reference/modules/network.asciidoc | 33 +++++++++++-------------- 1 file changed, 15 insertions(+), 18 deletions(-) diff --git a/docs/reference/modules/network.asciidoc b/docs/reference/modules/network.asciidoc index 6c16d846d95..0352e1d327c 100644 --- a/docs/reference/modules/network.asciidoc +++ b/docs/reference/modules/network.asciidoc @@ -6,13 +6,13 @@ Commonly used network settings: [cols="<,<",options="header",] |======================================================================= |Name |Description -|`network.host` |Host to bind and publish to other nodes. Can be set to an IP address, hostname, or special value (see table below). Defaults to `_local_`. +|`network.host` |Host to bind and publish to other nodes. Accepts an IP address, hostname, or special value (see table below). Defaults to `_local_`. -|`discovery.zen.ping.unicast.hosts`|Initial list of other nodes. Can be set to IP addresses or hostnames. Defaults to `["127.0.0.1", "[::1]"]`. +|`discovery.zen.ping.unicast.hosts`|Initial list of other nodes. Accepts IP addresses or hostnames. Defaults to `["127.0.0.1", "[::1]"]`. -|`http.port` |Port to bind for incoming http requests. Can be set to a single value or a range. Defaults to `9200-9300`. +|`http.port` |Port to bind for incoming http requests. Accepts a single value or a range. 
Defaults to `9200-9300`. -|`transport.tcp.port` |Port to bind for communication between nodes. Can be set to a single value or a range. Defaults to `9300-9400`. +|`transport.tcp.port` |Port to bind for communication between nodes. Accepts a single value or a range. Defaults to `9300-9400`. |======================================================================= Be careful with network configuration! Never expose an unprotected instance @@ -25,19 +25,18 @@ to the public internet. [cols="<,<",options="header",] |======================================================================= |Special Host Value |Description -|`_[networkInterface]_` |Resolves to the addresses of the provided -network interface. For example `_en0_`. +|`_[networkInterface]_` |Addresses of a network interface, for example `_en0_`. -|`_local_` |Will be resolved to loopback addresses (e.g. 127.0.0.1) +|`_local_` |Any loopback addresses on the system, for example `127.0.0.1`. -|`_site_` |Will be resolved to site-local addresses (e.g. 192.168.0.1) +|`_site_` |Any site-local addresses on the system, for example `192.168.0.1`. -|`_global_` |Will be resolved to globally-scoped addresses (e.g. 8.8.8.8) +|`_global_` |Any globally-scoped addresses on the system, for example `8.8.8.8`. |======================================================================= These special values will work over both IPv4 and IPv6 by default, -but you can also limit this with the use of `:ipv4` of `:ipv6` specifiers, for -example `_en0:ipv4_` would only bind to the ipv4 addresses of interface `en0`. +but you can also limit this with the use of `:ipv4` of `:ipv6` specifiers. For +example, `_en0:ipv4_` would only bind to the ipv4 addresses of interface `en0`. When the `discovery-ec2` plugin is installed, you can use {plugins}/discovery-ec2-discovery.html#discovery-ec2-network-host[ec2 specific host settings]. 
@@ -52,19 +51,17 @@ When the `discovery-gce` plugin is installed, you can use `network.bind_host` and `network.publish_host` can be set instead of `network.host` for advanced cases such as when behind a proxy server. -The `network.bind_host` setting allows to control the host different network -components will bind on. By default, the bind host will be `_local_` -(loopback addresses such as `127.0.0.1`, `::1`). +`network.bind_host` sets the host different network +components will bind on. -The `network.publish_host` setting allows to control the host the node will +`network.publish_host` sets the host the node will publish itself within the cluster so other nodes will be able to connect to it. Currently an elasticsearch node may be bound to multiple addresses, but only publishes one. If not specified, this defaults to the "best" address from `network.bind_host`, sorted by IPv4/IPv6 stack preference, then by reachability. -Both settings allows to be configured with either explicit host address(es) -or host name(s). The settings also accept logical setting value(s) explained -in the following table: +Both settings can be configured just like `network.host`: they accept ip +addresses, host names, and special values. [float] [[tcp-settings]] From fac8d9735681fa479c8703ca1d50d03386ccccc7 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Wed, 9 Dec 2015 14:30:17 -0500 Subject: [PATCH 52/57] ipv4 -> IPv4 --- docs/reference/modules/network.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/modules/network.asciidoc b/docs/reference/modules/network.asciidoc index 0352e1d327c..ffce7d61a00 100644 --- a/docs/reference/modules/network.asciidoc +++ b/docs/reference/modules/network.asciidoc @@ -36,7 +36,7 @@ to the public internet. These special values will work over both IPv4 and IPv6 by default, but you can also limit this with the use of `:ipv4` of `:ipv6` specifiers. 
For -example, `_en0:ipv4_` would only bind to the ipv4 addresses of interface `en0`. +example, `_en0:ipv4_` would only bind to the IPv4 addresses of interface `en0`. When the `discovery-ec2` plugin is installed, you can use {plugins}/discovery-ec2-discovery.html#discovery-ec2-network-host[ec2 specific host settings]. From 9d71c7210bfa68218833f18b879cf399513309b3 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Thu, 10 Dec 2015 11:23:48 +0100 Subject: [PATCH 53/57] Added "size units" to API conventions docs --- docs/reference/api-conventions.asciidoc | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/docs/reference/api-conventions.asciidoc b/docs/reference/api-conventions.asciidoc index 98fbcae2747..6bbd0419143 100644 --- a/docs/reference/api-conventions.asciidoc +++ b/docs/reference/api-conventions.asciidoc @@ -360,6 +360,22 @@ are: `s`:: Second `ms`:: Milli-second +[[size-units]] +[float] +=== Data size units + +Whenever the size of data needs to be specified, eg when setting a buffer size +parameter, the value must specify the unit, like `10kb` for 10 kilobytes. 
The +supported units are: + +[horizontal] +`b`:: Bytes +`kb`:: Kilobytes +`mb`:: Megabytes +`gb`:: Gigabytes +`tb`:: Terabytes +`pb`:: Petabytes + [[distance-units]] [float] === Distance Units From f43c8476aa3956d2e0e7bf37a7f5e3b604ed6e06 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Thu, 10 Dec 2015 11:24:25 +0100 Subject: [PATCH 54/57] Improvements to network docs --- docs/reference/index.asciidoc | 1 + docs/reference/modules/network.asciidoc | 207 ++++++++++++++++-------- 2 files changed, 141 insertions(+), 67 deletions(-) diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index 34d1cba92c0..4acd1f16eab 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -7,6 +7,7 @@ :jdk: 1.8.0_25 :defguide: https://www.elastic.co/guide/en/elasticsearch/guide/current :plugins: https://www.elastic.co/guide/en/elasticsearch/plugins/master +:javaclient: https://www.elastic.co/guide/en/elasticsearch/client/java-api/master/ :issue: https://github.com/elastic/elasticsearch/issues/ :pull: https://github.com/elastic/elasticsearch/pull/ diff --git a/docs/reference/modules/network.asciidoc b/docs/reference/modules/network.asciidoc index ffce7d61a00..5a710598206 100644 --- a/docs/reference/modules/network.asciidoc +++ b/docs/reference/modules/network.asciidoc @@ -1,102 +1,175 @@ [[modules-network]] -== Basic Settings +== Network Settings -Commonly used network settings: +Elasticsearch binds to localhost only by default. This is sufficient for you +to run a local development server (or even a development cluster, if you start +multiple nodes on the same machine), but you will need to configure some +<> in order to run a real +production cluster across multiple servers. -[cols="<,<",options="header",] -|======================================================================= -|Name |Description -|`network.host` |Host to bind and publish to other nodes. Accepts an IP address, hostname, or special value (see table below). 
 Defaults to `_local_`. - -|`discovery.zen.ping.unicast.hosts`|Initial list of other nodes. Accepts IP addresses or hostnames. Defaults to `["127.0.0.1", "[::1]"]`. - -|`http.port` |Port to bind for incoming http requests. Accepts a single value or a range. Defaults to `9200-9300`. - -|`transport.tcp.port` |Port to bind for communication between nodes. Accepts a single value or a range. Defaults to `9300-9400`. -|======================================================================= - -Be careful with network configuration! Never expose an unprotected instance -to the public internet. +[WARNING] +.Be careful with the network configuration! +============================= +Never expose an unprotected node to the public internet. +============================= [float] -[[special-values]] +[[common-network-settings]] +=== Commonly Used Network Settings + +`network.host`:: + +The node will bind to this hostname or IP address and _publish_ (advertise) +this host to other nodes in the cluster. Accepts an IP address, hostname, or a +<>. ++ +Defaults to `_local_`. + +`discovery.zen.ping.unicast.hosts`:: + +In order to join a cluster, a node needs to know the hostname or IP address of +at least some of the other nodes in the cluster. This setting provides the +initial list of other nodes that this node will try to contact. Accepts IP +addresses or hostnames. ++ +Defaults to `["127.0.0.1", "[::1]"]`. + +`http.port`:: + +Port to bind to for incoming HTTP requests. Accepts a single value or a range. +If a range is specified, the node will bind to the first available port in the +range. ++ +Defaults to `9200-9300`. + +`transport.tcp.port`:: + +Port to bind for communication between nodes. Accepts a single value or a +range. If a range is specified, the node will bind to the first available port +in the range. ++ +Defaults to `9300-9400`.
+ +[float] +[[network-interface-values]] === Special values for `network.host` -[cols="<,<",options="header",] -|======================================================================= -|Special Host Value |Description -|`_[networkInterface]_` |Addresses of a network interface, for example `_en0_`. +The following special values may be passed to `network.host`: -|`_local_` |Any loopback addresses on the system, for example `127.0.0.1`. +[horizontal] +`_[networkInterface]_`:: -|`_site_` |Any site-local addresses on the system, for example `192.168.0.1`. + Addresses of a network interface, for example `_en0_`. -|`_global_` |Any globally-scoped addresses on the system, for example `8.8.8.8`. -|======================================================================= +`_local_`:: -These special values will work over both IPv4 and IPv6 by default, -but you can also limit this with the use of `:ipv4` of `:ipv6` specifiers. For -example, `_en0:ipv4_` would only bind to the IPv4 addresses of interface `en0`. + Any loopback addresses on the system, for example `127.0.0.1`. -When the `discovery-ec2` plugin is installed, you can use -{plugins}/discovery-ec2-discovery.html#discovery-ec2-network-host[ec2 specific host settings]. +`_site_`:: + + Any site-local addresses on the system, for example `192.168.0.1`. + +`_global_`:: + + Any globally-scoped addresses on the system, for example `8.8.8.8`. -When the `discovery-gce` plugin is installed, you can use -{plugins}/discovery-gce-network-host.html[gce specific host settings]. [float] -[[advanced]] +==== IPv4 vs IPv6 + +These special values will work over both IPv4 and IPv6 by default, but you can +also limit this with the use of `:ipv4` of `:ipv6` specifiers. For example, +`_en0:ipv4_` would only bind to the IPv4 addresses of interface `en0`. 
+ +[TIP] +.Discovery in the cloud +================================ + +More special settings are available when running in the cloud with either the +{plugins}/discovery-ec2-discovery.html#discovery-ec2-network-host[EC2 discovery plugin] or the +{plugins}/discovery-gce-network-host.html#discovery-gce-network-host[Google Compute Engine discovery plugin] +installed. + +================================ + +[float] +[[advanced-network-settings]] === Advanced network settings -`network.bind_host` and `network.publish_host` can be set instead of `network.host` -for advanced cases such as when behind a proxy server. +The `network.host` setting explained in <> +is a shortcut which sets the _bind host_ and the _publish host_ at the same +time. In advanced used cases, such as when running behind a proxy server, you +may need to set these settings to different values: -`network.bind_host` sets the host different network -components will bind on. +`network.bind_host`:: -`network.publish_host` sets the host the node will -publish itself within the cluster so other nodes will be able to connect to it. -Currently an elasticsearch node may be bound to multiple addresses, but only -publishes one. If not specified, this defaults to the "best" address from -`network.bind_host`, sorted by IPv4/IPv6 stack preference, then by reachability. +This specifies which network interface(s) a node should bind to in order to +listen for incoming requests. A node can bind to multiple interfaces, e.g. +two network cards, or a site-local address and a local address. Defaults to +`network.host`. -Both settings can be configured just like `network.host`: they accept ip -addresses, host names, and special values. +`network.publish_host`:: + +The publish host is the single interface that the node advertises to other +nodes in the cluster, so that those nodes can connect to it. Currently an +elasticsearch node may be bound to multiple addresses, but only publishes one. 
+If not specified, this defaults to the ``best'' address from +`network.bind_host`, sorted by IPv4/IPv6 stack preference, then by +reachability. + +Both of the above settings can be configured just like `network.host` -- they +accept IP addresses, host names, and +<>. [float] [[tcp-settings]] === Advanced TCP Settings -Any component that uses TCP (like the HTTP, Transport and Memcached) -share the following allowed settings: +Any component that uses TCP (like the <> and +<> modules) share the following settings: -[cols="<,<",options="header",] -|======================================================================= -|Setting |Description -|`network.tcp.no_delay` |Enable or disable tcp no delay setting. +[horizontal] +`network.tcp.no_delay`:: + +Enable or disable the https://en.wikipedia.org/wiki/Nagle%27s_algorithm[TCP no delay] +setting. Defaults to `true`. + +`network.tcp.keep_alive`:: + +Enable or disable https://en.wikipedia.org/wiki/Keepalive[TCP keep alive]. Defaults to `true`. -|`network.tcp.keep_alive` |Enable or disable tcp keep alive. Defaults -to `true`. +`network.tcp.reuse_address`:: -|`network.tcp.reuse_address` |Should an address be reused or not. -Defaults to `true` on non-windows machines. +Should an address be reused or not. Defaults to `true` on non-windows +machines. -|`network.tcp.send_buffer_size` |The size of the tcp send buffer size -(in size setting format). By default not explicitly set. +`network.tcp.send_buffer_size`:: -|`network.tcp.receive_buffer_size` |The size of the tcp receive buffer -size (in size setting format). By default not explicitly set. -|======================================================================= +The size of the TCP send buffer (specified with <>). +By default not explicitly set. + +`network.tcp.receive_buffer_size`:: + +The size of the TCP receive buffer (specified with <>). +By default not explicitly set. 
[float] -[[module-settings]] -=== Module-specific Settings +=== Transport and HTTP protocols -There are several modules within a Node that use network based -configuration, for example, the -<> and -<> modules. Node level -network settings allows to set common settings that will be shared among -all network based modules (unless explicitly overridden in each module). +An Elasticsearch node exposes two network protocols which inherit the above +settings, but may be further configured independently: + +TCP Transport:: + +Used for communication between nodes in the cluster and by the Java +{javaclient}/node-client.html[Node client], +{javaclient}/transport-client.html[Transport client], and by the +<>. See the <> +for more information. + +HTTP:: + +Exposes the JSON-over-HTTP interface used by all clients other than the Java +clients. See the <> for more information. From fc9afa2bb8abd2437e683b53964a5a9b49bf11c5 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Thu, 10 Dec 2015 14:24:35 +0100 Subject: [PATCH 55/57] Reduce maximum number of concurrent requests in BulkProcessorIT The test configuration with seed A23029712A7EFB34 overwhelmed the pool which is invoked in TransportService#sendLocalRequest(). With this commit we reduce the maximum number of concurrent requests from 10 to 7 and add the failure message to the test output on the failing assertion for easier analysis. 
--- .../java/org/elasticsearch/action/bulk/BulkProcessorIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java index ded2abb494f..237f3a2e821 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java @@ -105,7 +105,7 @@ public class BulkProcessorIT extends ESIntegTestCase { public void testBulkProcessorConcurrentRequests() throws Exception { int bulkActions = randomIntBetween(10, 100); int numDocs = randomIntBetween(bulkActions, bulkActions + 100); - int concurrentRequests = randomIntBetween(0, 10); + int concurrentRequests = randomIntBetween(0, 7); int expectedBulkActions = numDocs / bulkActions; @@ -141,7 +141,7 @@ public class BulkProcessorIT extends ESIntegTestCase { Set ids = new HashSet<>(); for (BulkItemResponse bulkItemResponse : listener.bulkItems) { - assertThat(bulkItemResponse.isFailed(), equalTo(false)); + assertThat(bulkItemResponse.getFailureMessage(), bulkItemResponse.isFailed(), equalTo(false)); assertThat(bulkItemResponse.getIndex(), equalTo("test")); assertThat(bulkItemResponse.getType(), equalTo("test")); //with concurrent requests > 1 we can't rely on the order of the bulk requests From fab44398d9d48f12319bc018d4b436f723b6508e Mon Sep 17 00:00:00 2001 From: Jun Ohtani Date: Mon, 15 Jun 2015 16:32:44 +0900 Subject: [PATCH 56/57] Analysis: Add detail response support add explain option fix char_filter bug Closes #11076 #15257 --- .../admin/indices/analyze/AnalyzeRequest.java | 46 ++- .../analyze/AnalyzeRequestBuilder.java | 16 + .../indices/analyze/AnalyzeResponse.java | 82 ++++- .../analyze/DetailAnalyzeResponse.java | 319 ++++++++++++++++++ .../analyze/TransportAnalyzeAction.java | 235 ++++++++++++- .../indices/analyze/RestAnalyzeAction.java | 51 ++- 
.../indices/analyze/AnalyzeActionIT.java | 242 ++++++++++++- docs/reference/indices/analyze.asciidoc | 71 ++++ .../rest-api-spec/api/indices.analyze.json | 8 + .../test/indices.analyze/10_analyze.yaml | 28 ++ 10 files changed, 1055 insertions(+), 43 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/action/admin/indices/analyze/DetailAnalyzeResponse.java diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java index 6482e340d1a..db1a03efbc6 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.action.admin.indices.analyze; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.single.shard.SingleShardRequest; import org.elasticsearch.common.Strings; @@ -46,6 +47,10 @@ public class AnalyzeRequest extends SingleShardRequest { private String field; + private boolean explain = false; + + private String[] attributes = Strings.EMPTY_ARRAY; + public AnalyzeRequest() { } @@ -86,6 +91,9 @@ public class AnalyzeRequest extends SingleShardRequest { } public AnalyzeRequest tokenFilters(String... tokenFilters) { + if (tokenFilters == null) { + throw new IllegalArgumentException("token filters must not be null"); + } this.tokenFilters = tokenFilters; return this; } @@ -95,6 +103,9 @@ public class AnalyzeRequest extends SingleShardRequest { } public AnalyzeRequest charFilters(String... 
charFilters) { + if (charFilters == null) { + throw new IllegalArgumentException("char filters must not be null"); + } this.charFilters = charFilters; return this; } @@ -112,18 +123,33 @@ public class AnalyzeRequest extends SingleShardRequest { return this.field; } + public AnalyzeRequest explain(boolean explain) { + this.explain = explain; + return this; + } + + public boolean explain() { + return this.explain; + } + + public AnalyzeRequest attributes(String... attributes) { + if (attributes == null) { + throw new IllegalArgumentException("attributes must not be null"); + } + this.attributes = attributes; + return this; + } + + public String[] attributes() { + return this.attributes; + } + @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (text == null || text.length == 0) { validationException = addValidationError("text is missing", validationException); } - if (tokenFilters == null) { - validationException = addValidationError("token filters must not be null", validationException); - } - if (charFilters == null) { - validationException = addValidationError("char filters must not be null", validationException); - } return validationException; } @@ -136,6 +162,10 @@ public class AnalyzeRequest extends SingleShardRequest { tokenFilters = in.readStringArray(); charFilters = in.readStringArray(); field = in.readOptionalString(); + if (in.getVersion().onOrAfter(Version.V_2_2_0)) { + explain = in.readBoolean(); + attributes = in.readStringArray(); + } } @Override @@ -147,5 +177,9 @@ public class AnalyzeRequest extends SingleShardRequest { out.writeStringArray(tokenFilters); out.writeStringArray(charFilters); out.writeOptionalString(field); + if (out.getVersion().onOrAfter(Version.V_2_2_0)) { + out.writeBoolean(explain); + out.writeStringArray(attributes); + } } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java 
b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java index 9ed02e6be1c..23c1739d771 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java @@ -78,6 +78,22 @@ public class AnalyzeRequestBuilder extends SingleShardOperationRequestBuilder, ToXContent { - public static class AnalyzeToken implements Streamable { + public static class AnalyzeToken implements Streamable, ToXContent { private String term; private int startOffset; private int endOffset; private int position; + private Map attributes; private String type; AnalyzeToken() { } - public AnalyzeToken(String term, int position, int startOffset, int endOffset, String type) { + public AnalyzeToken(String term, int position, int startOffset, int endOffset, String type, + Map attributes) { this.term = term; this.position = position; this.startOffset = startOffset; this.endOffset = endOffset; this.type = type; + this.attributes = attributes; } public String getTerm() { @@ -74,6 +79,27 @@ public class AnalyzeResponse extends ActionResponse implements Iterable getAttributes(){ + return this.attributes; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(Fields.TOKEN, term); + builder.field(Fields.START_OFFSET, startOffset); + builder.field(Fields.END_OFFSET, endOffset); + builder.field(Fields.TYPE, type); + builder.field(Fields.POSITION, position); + if (attributes != null && !attributes.isEmpty()) { + for (Map.Entry entity : attributes.entrySet()) { + builder.field(entity.getKey(), entity.getValue()); + } + } + builder.endObject(); + return builder; + } + public static AnalyzeToken readAnalyzeToken(StreamInput in) throws IOException { AnalyzeToken analyzeToken = new AnalyzeToken(); analyzeToken.readFrom(in); @@ -87,6 +113,9 @@ 
public class AnalyzeResponse extends ActionResponse implements Iterable) in.readGenericValue(); + } } @Override @@ -96,22 +125,32 @@ public class AnalyzeResponse extends ActionResponse implements Iterable tokens; AnalyzeResponse() { } - public AnalyzeResponse(List tokens) { + public AnalyzeResponse(List tokens, DetailAnalyzeResponse detail) { this.tokens = tokens; + this.detail = detail; } public List getTokens() { return this.tokens; } + public DetailAnalyzeResponse detail() { + return this.detail; + } + @Override public Iterator iterator() { return tokens.iterator(); @@ -119,17 +158,19 @@ public class AnalyzeResponse extends ActionResponse implements Iterable 0) { + charfilters = new CharFilteredText[size]; + for (int i = 0; i < size; i++) { + charfilters[i] = CharFilteredText.readCharFilteredText(in); + } + } + size = in.readVInt(); + if (size > 0) { + tokenfilters = new AnalyzeTokenList[size]; + for (int i = 0; i < size; i++) { + tokenfilters[i] = AnalyzeTokenList.readAnalyzeTokenList(in); + } + } + } else { + analyzer = AnalyzeTokenList.readAnalyzeTokenList(in); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(customAnalyzer); + if (customAnalyzer) { + tokenizer.writeTo(out); + if (charfilters != null) { + out.writeVInt(charfilters.length); + for (CharFilteredText charfilter : charfilters) { + charfilter.writeTo(out); + } + } else { + out.writeVInt(0); + } + if (tokenfilters != null) { + out.writeVInt(tokenfilters.length); + for (AnalyzeTokenList tokenfilter : tokenfilters) { + tokenfilter.writeTo(out); + } + } else { + out.writeVInt(0); + } + } else { + analyzer.writeTo(out); + } + } + + public static class AnalyzeTokenList implements Streamable, ToXContent { + private String name; + private AnalyzeResponse.AnalyzeToken[] tokens; + + AnalyzeTokenList() { + } + + public AnalyzeTokenList(String name, AnalyzeResponse.AnalyzeToken[] tokens) { + this.name = name; + this.tokens = tokens; + } + + public String 
getName() { + return name; + } + + public AnalyzeResponse.AnalyzeToken[] getTokens() { + return tokens; + } + + public static AnalyzeTokenList readAnalyzeTokenList(StreamInput in) throws IOException { + AnalyzeTokenList list = new AnalyzeTokenList(); + list.readFrom(in); + return list; + } + + public XContentBuilder toXContentWithoutObject(XContentBuilder builder, Params params) throws IOException { + builder.field(Fields.NAME, this.name); + builder.startArray(AnalyzeResponse.Fields.TOKENS); + for (AnalyzeResponse.AnalyzeToken token : tokens) { + token.toXContent(builder, params); + } + builder.endArray(); + return builder; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(Fields.NAME, this.name); + builder.startArray(AnalyzeResponse.Fields.TOKENS); + for (AnalyzeResponse.AnalyzeToken token : tokens) { + token.toXContent(builder, params); + } + builder.endArray(); + builder.endObject(); + return builder; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + name = in.readString(); + int size = in.readVInt(); + if (size > 0) { + tokens = new AnalyzeResponse.AnalyzeToken[size]; + for (int i = 0; i < size; i++) { + tokens[i] = AnalyzeResponse.AnalyzeToken.readAnalyzeToken(in); + } + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(name); + if (tokens != null) { + out.writeVInt(tokens.length); + for (AnalyzeResponse.AnalyzeToken token : tokens) { + token.writeTo(out); + } + } else { + out.writeVInt(0); + } + } + } + + public static class CharFilteredText implements Streamable, ToXContent { + private String name; + private String[] texts; + CharFilteredText() { + } + + public CharFilteredText(String name, String[] texts) { + this.name = name; + if (texts != null) { + this.texts = texts; + } else { + this.texts = Strings.EMPTY_ARRAY; + } + } + + public String getName() { + return name; + } + 
+ public String[] getTexts() { + return texts; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(Fields.NAME, name); + builder.field(Fields.FILTERED_TEXT, texts); + builder.endObject(); + return builder; + } + + public static CharFilteredText readCharFilteredText(StreamInput in) throws IOException { + CharFilteredText text = new CharFilteredText(); + text.readFrom(in); + return text; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + name = in.readString(); + texts = in.readStringArray(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(name); + out.writeStringArray(texts); + } + } +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index ba49c33a15d..ecdf977b923 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -20,10 +20,15 @@ package org.elasticsearch.action.admin.indices.analyze; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.analysis.tokenattributes.TypeAttribute; +import org.apache.lucene.util.Attribute; +import org.apache.lucene.util.AttributeReflector; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.ActionFilters; import 
org.elasticsearch.action.support.single.shard.TransportSingleShardAction; @@ -33,6 +38,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.ShardsIterator; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexService; @@ -46,8 +52,8 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; +import java.io.Reader; +import java.util.*; /** * Transport action used to execute analyze requests @@ -222,6 +228,23 @@ public class TransportAnalyzeAction extends TransportSingleShardAction tokens = null; + DetailAnalyzeResponse detail = null; + + if (request.explain()) { + detail = detailAnalyze(request, analyzer, field); + } else { + tokens = simpleAnalyze(request, analyzer, field); + } + + if (closeAnalyzer) { + analyzer.close(); + } + + return new AnalyzeResponse(tokens, detail); + } + + private static List simpleAnalyze(AnalyzeRequest request, Analyzer analyzer, String field) { List tokens = new ArrayList<>(); int lastPosition = -1; int lastOffset = 0; @@ -238,7 +261,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction 0) { lastPosition = lastPosition + increment; } - tokens.add(new AnalyzeResponse.AnalyzeToken(term.toString(), lastPosition, lastOffset + offset.startOffset(), lastOffset + offset.endOffset(), type.type())); + tokens.add(new AnalyzeResponse.AnalyzeToken(term.toString(), lastPosition, lastOffset + offset.startOffset(), lastOffset + offset.endOffset(), type.type(), null)); } stream.end(); @@ -251,11 +274,211 @@ public class TransportAnalyzeAction extends TransportSingleShardAction includeAttributes = new 
HashSet<>(); + if (request.attributes() != null) { + for (String attribute : request.attributes()) { + includeAttributes.add(attribute.toLowerCase(Locale.ROOT)); + } } - return new AnalyzeResponse(tokens); + CustomAnalyzer customAnalyzer = null; + if (analyzer instanceof CustomAnalyzer) { + customAnalyzer = (CustomAnalyzer) analyzer; + } else if (analyzer instanceof NamedAnalyzer && ((NamedAnalyzer) analyzer).analyzer() instanceof CustomAnalyzer) { + customAnalyzer = (CustomAnalyzer) ((NamedAnalyzer) analyzer).analyzer(); + } + + if (customAnalyzer != null) { + // customAnalyzer = divide charfilter, tokenizer tokenfilters + CharFilterFactory[] charFilterFactories = customAnalyzer.charFilters(); + TokenizerFactory tokenizerFactory = customAnalyzer.tokenizerFactory(); + TokenFilterFactory[] tokenFilterFactories = customAnalyzer.tokenFilters(); + + String[][] charFiltersTexts = new String[charFilterFactories != null ? charFilterFactories.length : 0][request.text().length]; + TokenListCreator[] tokenFiltersTokenListCreator = new TokenListCreator[tokenFilterFactories != null ? 
tokenFilterFactories.length : 0]; + + TokenListCreator tokenizerTokenListCreator = new TokenListCreator(); + + for (int textIndex = 0; textIndex < request.text().length; textIndex++) { + String charFilteredSource = request.text()[textIndex]; + + Reader reader = new FastStringReader(charFilteredSource); + if (charFilterFactories != null) { + + for (int charFilterIndex = 0; charFilterIndex < charFilterFactories.length; charFilterIndex++) { + reader = charFilterFactories[charFilterIndex].create(reader); + Reader readerForWriteOut = new FastStringReader(charFilteredSource); + readerForWriteOut = charFilterFactories[charFilterIndex].create(readerForWriteOut); + charFilteredSource = writeCharStream(readerForWriteOut); + charFiltersTexts[charFilterIndex][textIndex] = charFilteredSource; + } + } + + // analyzing only tokenizer + Tokenizer tokenizer = tokenizerFactory.create(); + tokenizer.setReader(reader); + tokenizerTokenListCreator.analyze(tokenizer, customAnalyzer, field, includeAttributes); + + // analyzing each tokenfilter + if (tokenFilterFactories != null) { + for (int tokenFilterIndex = 0; tokenFilterIndex < tokenFilterFactories.length; tokenFilterIndex++) { + if (tokenFiltersTokenListCreator[tokenFilterIndex] == null) { + tokenFiltersTokenListCreator[tokenFilterIndex] = new TokenListCreator(); + } + TokenStream stream = createStackedTokenStream(request.text()[textIndex], + charFilterFactories, tokenizerFactory, tokenFilterFactories, tokenFilterIndex + 1); + tokenFiltersTokenListCreator[tokenFilterIndex].analyze(stream, customAnalyzer, field, includeAttributes); + } + } + } + + DetailAnalyzeResponse.CharFilteredText[] charFilteredLists = new DetailAnalyzeResponse.CharFilteredText[charFiltersTexts.length]; + if (charFilterFactories != null) { + for (int charFilterIndex = 0; charFilterIndex < charFiltersTexts.length; charFilterIndex++) { + charFilteredLists[charFilterIndex] = new DetailAnalyzeResponse.CharFilteredText( + charFilterFactories[charFilterIndex].name(), 
charFiltersTexts[charFilterIndex]); + } + } + DetailAnalyzeResponse.AnalyzeTokenList[] tokenFilterLists = new DetailAnalyzeResponse.AnalyzeTokenList[tokenFiltersTokenListCreator.length]; + if (tokenFilterFactories != null) { + for (int tokenFilterIndex = 0; tokenFilterIndex < tokenFiltersTokenListCreator.length; tokenFilterIndex++) { + tokenFilterLists[tokenFilterIndex] = new DetailAnalyzeResponse.AnalyzeTokenList( + tokenFilterFactories[tokenFilterIndex].name(), tokenFiltersTokenListCreator[tokenFilterIndex].getArrayTokens()); + } + } + detailResponse = new DetailAnalyzeResponse(charFilteredLists, new DetailAnalyzeResponse.AnalyzeTokenList(tokenizerFactory.name(), tokenizerTokenListCreator.getArrayTokens()), tokenFilterLists); + } else { + String name; + if (analyzer instanceof NamedAnalyzer) { + name = ((NamedAnalyzer) analyzer).name(); + } else { + name = analyzer.getClass().getName(); + } + + TokenListCreator tokenListCreator = new TokenListCreator(); + for (String text : request.text()) { + tokenListCreator.analyze(analyzer.tokenStream(field, text), analyzer, field, + includeAttributes); + } + detailResponse = new DetailAnalyzeResponse(new DetailAnalyzeResponse.AnalyzeTokenList(name, tokenListCreator.getArrayTokens())); + } + return detailResponse; + } + + private static TokenStream createStackedTokenStream(String source, CharFilterFactory[] charFilterFactories, TokenizerFactory tokenizerFactory, TokenFilterFactory[] tokenFilterFactories, int current) { + Reader reader = new FastStringReader(source); + for (CharFilterFactory charFilterFactory : charFilterFactories) { + reader = charFilterFactory.create(reader); + } + Tokenizer tokenizer = tokenizerFactory.create(); + tokenizer.setReader(reader); + TokenStream tokenStream = tokenizer; + for (int i = 0; i < current; i++) { + tokenStream = tokenFilterFactories[i].create(tokenStream); + } + return tokenStream; + } + + private static String writeCharStream(Reader input) { + final int BUFFER_SIZE = 1024; + char[] 
buf = new char[BUFFER_SIZE]; + int len; + StringBuilder sb = new StringBuilder(); + do { + try { + len = input.read(buf, 0, BUFFER_SIZE); + } catch (IOException e) { + throw new ElasticsearchException("failed to analyze (charFiltering)", e); + } + if (len > 0) + sb.append(buf, 0, len); + } while (len == BUFFER_SIZE); + return sb.toString(); + } + + private static class TokenListCreator { + int lastPosition = -1; + int lastOffset = 0; + List tokens; + + TokenListCreator() { + tokens = new ArrayList<>(); + } + + private void analyze(TokenStream stream, Analyzer analyzer, String field, Set includeAttributes) { + try { + stream.reset(); + CharTermAttribute term = stream.addAttribute(CharTermAttribute.class); + PositionIncrementAttribute posIncr = stream.addAttribute(PositionIncrementAttribute.class); + OffsetAttribute offset = stream.addAttribute(OffsetAttribute.class); + TypeAttribute type = stream.addAttribute(TypeAttribute.class); + + while (stream.incrementToken()) { + int increment = posIncr.getPositionIncrement(); + if (increment > 0) { + lastPosition = lastPosition + increment; + } + tokens.add(new AnalyzeResponse.AnalyzeToken(term.toString(), lastPosition, lastOffset + offset.startOffset(), + lastOffset +offset.endOffset(), type.type(), extractExtendedAttributes(stream, includeAttributes))); + + } + stream.end(); + lastOffset += offset.endOffset(); + lastPosition += posIncr.getPositionIncrement(); + + lastPosition += analyzer.getPositionIncrementGap(field); + lastOffset += analyzer.getOffsetGap(field); + + } catch (IOException e) { + throw new ElasticsearchException("failed to analyze", e); + } finally { + IOUtils.closeWhileHandlingException(stream); + } + } + + private AnalyzeResponse.AnalyzeToken[] getArrayTokens() { + return tokens.toArray(new AnalyzeResponse.AnalyzeToken[tokens.size()]); + } + + } + + /** + * other attribute extract object. 
+ * Extracted object group by AttributeClassName + * + * @param stream current TokenStream + * @param includeAttributes filtering attributes + * @return Map<key value> + */ + private static Map extractExtendedAttributes(TokenStream stream, final Set includeAttributes) { + final Map extendedAttributes = new TreeMap<>(); + + stream.reflectWith(new AttributeReflector() { + @Override + public void reflect(Class attClass, String key, Object value) { + if (CharTermAttribute.class.isAssignableFrom(attClass)) + return; + if (PositionIncrementAttribute.class.isAssignableFrom(attClass)) + return; + if (OffsetAttribute.class.isAssignableFrom(attClass)) + return; + if (TypeAttribute.class.isAssignableFrom(attClass)) + return; + if (includeAttributes == null || includeAttributes.isEmpty() || includeAttributes.contains(key.toLowerCase(Locale.ROOT))) { + if (value instanceof BytesRef) { + final BytesRef p = (BytesRef) value; + value = p.toString(); + } + extendedAttributes.put(key, value); + } + } + }); + + return extendedAttributes; } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java index 57ceb21f41e..3a86911f464 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java @@ -21,7 +21,8 @@ package org.elasticsearch.rest.action.admin.indices.analyze; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; import 
org.elasticsearch.common.settings.Settings; @@ -47,6 +48,17 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; */ public class RestAnalyzeAction extends BaseRestHandler { + public static class Fields { + public static final ParseField ANALYZER = new ParseField("analyzer"); + public static final ParseField TEXT = new ParseField("text"); + public static final ParseField FIELD = new ParseField("field"); + public static final ParseField TOKENIZER = new ParseField("tokenizer"); + public static final ParseField TOKEN_FILTERS = new ParseField("token_filters", "filters"); + public static final ParseField CHAR_FILTERS = new ParseField("char_filters"); + public static final ParseField EXPLAIN = new ParseField("explain"); + public static final ParseField ATTRIBUTES = new ParseField("attributes"); + } + @Inject public RestAnalyzeAction(Settings settings, RestController controller, Client client) { super(settings, controller, client); @@ -68,6 +80,8 @@ public class RestAnalyzeAction extends BaseRestHandler { analyzeRequest.tokenizer(request.param("tokenizer")); analyzeRequest.tokenFilters(request.paramAsStringArray("token_filters", request.paramAsStringArray("filters", analyzeRequest.tokenFilters()))); analyzeRequest.charFilters(request.paramAsStringArray("char_filters", analyzeRequest.charFilters())); + analyzeRequest.explain(request.paramAsBoolean("explain", false)); + analyzeRequest.attributes(request.paramAsStringArray("attributes", analyzeRequest.attributes())); if (RestActions.hasBodyContent(request)) { XContentType type = RestActions.guessBodyContentType(request); @@ -78,14 +92,14 @@ public class RestAnalyzeAction extends BaseRestHandler { } } else { // NOTE: if rest request with xcontent body has request parameters, the parameters does not override xcontent values - buildFromContent(RestActions.getRestContent(request), analyzeRequest); + buildFromContent(RestActions.getRestContent(request), analyzeRequest, parseFieldMatcher); } } 
client.admin().indices().analyze(analyzeRequest, new RestToXContentListener(channel)); } - public static void buildFromContent(BytesReference content, AnalyzeRequest analyzeRequest) { + public static void buildFromContent(BytesReference content, AnalyzeRequest analyzeRequest, ParseFieldMatcher parseFieldMatcher) { try (XContentParser parser = XContentHelper.createParser(content)) { if (parser.nextToken() != XContentParser.Token.START_OBJECT) { throw new IllegalArgumentException("Malforrmed content, must start with an object"); @@ -95,9 +109,9 @@ public class RestAnalyzeAction extends BaseRestHandler { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if ("text".equals(currentFieldName) && token == XContentParser.Token.VALUE_STRING) { + } else if (parseFieldMatcher.match(currentFieldName, Fields.TEXT) && token == XContentParser.Token.VALUE_STRING) { analyzeRequest.text(parser.text()); - } else if ("text".equals(currentFieldName) && token == XContentParser.Token.START_ARRAY) { + } else if (parseFieldMatcher.match(currentFieldName, Fields.TEXT) && token == XContentParser.Token.START_ARRAY) { List texts = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token.isValue() == false) { @@ -105,14 +119,14 @@ public class RestAnalyzeAction extends BaseRestHandler { } texts.add(parser.text()); } - analyzeRequest.text(texts.toArray(Strings.EMPTY_ARRAY)); - } else if ("analyzer".equals(currentFieldName) && token == XContentParser.Token.VALUE_STRING) { + analyzeRequest.text(texts.toArray(new String[texts.size()])); + } else if (parseFieldMatcher.match(currentFieldName, Fields.ANALYZER) && token == XContentParser.Token.VALUE_STRING) { analyzeRequest.analyzer(parser.text()); - } else if ("field".equals(currentFieldName) && token == XContentParser.Token.VALUE_STRING) { + } else if 
(parseFieldMatcher.match(currentFieldName, Fields.FIELD) && token == XContentParser.Token.VALUE_STRING) { analyzeRequest.field(parser.text()); - } else if ("tokenizer".equals(currentFieldName) && token == XContentParser.Token.VALUE_STRING) { + } else if (parseFieldMatcher.match(currentFieldName, Fields.TOKENIZER) && token == XContentParser.Token.VALUE_STRING) { analyzeRequest.tokenizer(parser.text()); - } else if (("token_filters".equals(currentFieldName) || "filters".equals(currentFieldName)) && token == XContentParser.Token.START_ARRAY) { + } else if (parseFieldMatcher.match(currentFieldName, Fields.TOKEN_FILTERS) && token == XContentParser.Token.START_ARRAY) { List filters = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token.isValue() == false) { @@ -120,8 +134,8 @@ public class RestAnalyzeAction extends BaseRestHandler { } filters.add(parser.text()); } - analyzeRequest.tokenFilters(filters.toArray(Strings.EMPTY_ARRAY)); - } else if ("char_filters".equals(currentFieldName) && token == XContentParser.Token.START_ARRAY) { + analyzeRequest.tokenFilters(filters.toArray(new String[filters.size()])); + } else if (parseFieldMatcher.match(currentFieldName, Fields.CHAR_FILTERS) && token == XContentParser.Token.START_ARRAY) { List charFilters = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token.isValue() == false) { @@ -129,7 +143,18 @@ public class RestAnalyzeAction extends BaseRestHandler { } charFilters.add(parser.text()); } - analyzeRequest.tokenFilters(charFilters.toArray(Strings.EMPTY_ARRAY)); + analyzeRequest.charFilters(charFilters.toArray(new String[charFilters.size()])); + } else if (parseFieldMatcher.match(currentFieldName, Fields.EXPLAIN) && token == XContentParser.Token.VALUE_BOOLEAN) { + analyzeRequest.explain(parser.booleanValue()); + } else if (parseFieldMatcher.match(currentFieldName, Fields.ATTRIBUTES) && token == XContentParser.Token.START_ARRAY){ + 
List attributes = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + if (token.isValue() == false) { + throw new IllegalArgumentException(currentFieldName + " array element should only contain attribute name"); + } + attributes.add(parser.text()); + } + analyzeRequest.attributes(attributes.toArray(new String[attributes.size()])); } else { throw new IllegalArgumentException("Unknown parameter [" + currentFieldName + "] in request body or parameter is of the wrong type[" + token + "] "); } diff --git a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java index 9f4f2b58e70..80993229bec 100644 --- a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java +++ b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java @@ -22,11 +22,14 @@ import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder; import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; +import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.rest.action.admin.indices.analyze.RestAnalyzeAction; import org.elasticsearch.test.ESIntegTestCase; +import org.hamcrest.core.IsNull; import java.io.IOException; @@ -36,8 +39,10 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.startsWith; + /** * */ @@ -201,7 +206,7 @@ public class 
AnalyzeActionIT extends ESIntegTestCase { AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - RestAnalyzeAction.buildFromContent(content, analyzeRequest); + RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); assertThat(analyzeRequest.text().length, equalTo(1)); assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); @@ -213,7 +218,7 @@ public class AnalyzeActionIT extends ESIntegTestCase { AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); try { - RestAnalyzeAction.buildFromContent(new BytesArray("{invalid_json}"), analyzeRequest); + RestAnalyzeAction.buildFromContent(new BytesArray("{invalid_json}"), analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); fail("shouldn't get here"); } catch (Exception e) { assertThat(e, instanceOf(IllegalArgumentException.class)); @@ -230,7 +235,7 @@ public class AnalyzeActionIT extends ESIntegTestCase { .endObject().bytes(); try { - RestAnalyzeAction.buildFromContent(invalidContent, analyzeRequest); + RestAnalyzeAction.buildFromContent(invalidContent, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); fail("shouldn't get here"); } catch (Exception e) { assertThat(e, instanceOf(IllegalArgumentException.class)); @@ -267,4 +272,235 @@ public class AnalyzeActionIT extends ESIntegTestCase { } + public void testDetailAnalyze() throws Exception { + assertAcked(prepareCreate("test").addAlias(new Alias("alias")) + .setSettings( + settingsBuilder() + .put("index.analysis.char_filter.my_mapping.type", "mapping") + .putArray("index.analysis.char_filter.my_mapping.mappings", "PH=>F") + .put("index.analysis.analyzer.test_analyzer.type", "custom") + .put("index.analysis.analyzer.test_analyzer.position_increment_gap", "100") + .put("index.analysis.analyzer.test_analyzer.tokenizer", "standard") + .putArray("index.analysis.analyzer.test_analyzer.char_filter", "my_mapping") + .putArray("index.analysis.analyzer.test_analyzer.filter", "snowball"))); + 
ensureGreen(); + + for (int i = 0; i < 10; i++) { + AnalyzeResponse analyzeResponse = admin().indices().prepareAnalyze().setIndex(indexOrAlias()).setText("THIS IS A PHISH") + .setExplain(true).setCharFilters("my_mapping").setTokenizer("keyword").setTokenFilters("lowercase").get(); + + assertThat(analyzeResponse.detail().analyzer(), IsNull.nullValue()); + //charfilters + // global charfilter is not change text. + assertThat(analyzeResponse.detail().charfilters().length, equalTo(1)); + assertThat(analyzeResponse.detail().charfilters()[0].getName(), equalTo("my_mapping")); + assertThat(analyzeResponse.detail().charfilters()[0].getTexts().length, equalTo(1)); + assertThat(analyzeResponse.detail().charfilters()[0].getTexts()[0], equalTo("THIS IS A FISH")); + //tokenizer + assertThat(analyzeResponse.detail().tokenizer().getName(), equalTo("keyword")); + assertThat(analyzeResponse.detail().tokenizer().getTokens().length, equalTo(1)); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getTerm(), equalTo("THIS IS A FISH")); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getStartOffset(), equalTo(0)); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getEndOffset(), equalTo(15)); + //tokenfilters + assertThat(analyzeResponse.detail().tokenfilters().length, equalTo(1)); + assertThat(analyzeResponse.detail().tokenfilters()[0].getName(), equalTo("lowercase")); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens().length, equalTo(1)); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[0].getTerm(), equalTo("this is a fish")); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[0].getPosition(), equalTo(0)); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[0].getStartOffset(), equalTo(0)); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[0].getEndOffset(), equalTo(15)); + } + } + + public void testDetailAnalyzeWithNoIndex() throws Exception { + 
//analyzer only + AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST") + .setExplain(true).setAnalyzer("simple").get(); + + assertThat(analyzeResponse.detail().tokenizer(), IsNull.nullValue()); + assertThat(analyzeResponse.detail().tokenfilters(), IsNull.nullValue()); + assertThat(analyzeResponse.detail().charfilters(), IsNull.nullValue()); + assertThat(analyzeResponse.detail().analyzer().getName(), equalTo("simple")); + assertThat(analyzeResponse.detail().analyzer().getTokens().length, equalTo(4)); + } + + public void testDetailAnalyzeCustomAnalyzerWithNoIndex() throws Exception { + //analyzer only + AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST") + .setExplain(true).setAnalyzer("simple").get(); + + assertThat(analyzeResponse.detail().tokenizer(), IsNull.nullValue()); + assertThat(analyzeResponse.detail().tokenfilters(), IsNull.nullValue()); + assertThat(analyzeResponse.detail().charfilters(), IsNull.nullValue()); + assertThat(analyzeResponse.detail().analyzer().getName(), equalTo("simple")); + assertThat(analyzeResponse.detail().analyzer().getTokens().length, equalTo(4)); + + //custom analyzer + analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST") + .setExplain(true).setCharFilters("html_strip").setTokenizer("keyword").setTokenFilters("lowercase").get(); + assertThat(analyzeResponse.detail().analyzer(), IsNull.nullValue()); + //charfilters + // global charfilter is not change text. 
+ assertThat(analyzeResponse.detail().charfilters().length, equalTo(1)); + assertThat(analyzeResponse.detail().charfilters()[0].getName(), equalTo("html_strip")); + assertThat(analyzeResponse.detail().charfilters()[0].getTexts().length, equalTo(1)); + assertThat(analyzeResponse.detail().charfilters()[0].getTexts()[0], equalTo("\nTHIS IS A TEST\n")); + //tokenizer + assertThat(analyzeResponse.detail().tokenizer().getName(), equalTo("keyword")); + assertThat(analyzeResponse.detail().tokenizer().getTokens().length, equalTo(1)); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getTerm(), equalTo("\nTHIS IS A TEST\n")); + //tokenfilters + assertThat(analyzeResponse.detail().tokenfilters().length, equalTo(1)); + assertThat(analyzeResponse.detail().tokenfilters()[0].getName(), equalTo("lowercase")); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens().length, equalTo(1)); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[0].getTerm(), equalTo("\nthis is a test\n")); + + + //check other attributes + analyzeResponse = client().admin().indices().prepareAnalyze("This is troubled") + .setExplain(true).setTokenizer("standard").setTokenFilters("snowball").get(); + + assertThat(analyzeResponse.detail().tokenfilters().length, equalTo(1)); + assertThat(analyzeResponse.detail().tokenfilters()[0].getName(), equalTo("snowball")); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens().length, equalTo(3)); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[2].getTerm(), equalTo("troubl")); + String[] expectedAttributesKey = { + "bytes", + "positionLength", + "keyword"}; + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[2].getAttributes().size(), equalTo(expectedAttributesKey.length)); + Object extendedAttribute; + + for (String key : expectedAttributesKey) { + extendedAttribute = analyzeResponse.detail().tokenfilters()[0].getTokens()[2].getAttributes().get(key); + 
assertThat(extendedAttribute, notNullValue()); + } + } + + public void testDetailAnalyzeSpecifyAttributes() throws Exception { + AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("This is troubled") + .setExplain(true).setTokenizer("standard").setTokenFilters("snowball").setAttributes("keyword").get(); + + assertThat(analyzeResponse.detail().tokenfilters().length, equalTo(1)); + assertThat(analyzeResponse.detail().tokenfilters()[0].getName(), equalTo("snowball")); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens().length, equalTo(3)); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[2].getTerm(), equalTo("troubl")); + String[] expectedAttributesKey = { + "keyword"}; + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[2].getAttributes().size(), equalTo(expectedAttributesKey.length)); + Object extendedAttribute; + + for (String key : expectedAttributesKey) { + extendedAttribute = analyzeResponse.detail().tokenfilters()[0].getTokens()[2].getAttributes().get(key); + assertThat(extendedAttribute, notNullValue()); + } + } + + public void testDetailAnalyzeWithMultiValues() throws Exception { + assertAcked(prepareCreate("test").addAlias(new Alias("alias"))); + ensureGreen(); + client().admin().indices().preparePutMapping("test") + .setType("document").setSource("simple", "type=string,analyzer=simple,position_increment_gap=100").get(); + + String[] texts = new String[]{"THIS IS A TEST", "THE SECOND TEXT"}; + AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze().setIndex(indexOrAlias()).setText(texts) + .setExplain(true).setField("simple").setText(texts).execute().get(); + + assertThat(analyzeResponse.detail().analyzer().getName(), equalTo("simple")); + assertThat(analyzeResponse.detail().analyzer().getTokens().length, equalTo(7)); + AnalyzeResponse.AnalyzeToken token = analyzeResponse.detail().analyzer().getTokens()[3]; + + assertThat(token.getTerm(), equalTo("test")); + 
assertThat(token.getPosition(), equalTo(3)); + assertThat(token.getStartOffset(), equalTo(10)); + assertThat(token.getEndOffset(), equalTo(14)); + + token = analyzeResponse.detail().analyzer().getTokens()[5]; + assertThat(token.getTerm(), equalTo("second")); + assertThat(token.getPosition(), equalTo(105)); + assertThat(token.getStartOffset(), equalTo(19)); + assertThat(token.getEndOffset(), equalTo(25)); + } + + public void testDetailAnalyzeWithMultiValuesWithCustomAnalyzer() throws Exception { + assertAcked(prepareCreate("test").addAlias(new Alias("alias")) + .setSettings( + settingsBuilder() + .put("index.analysis.char_filter.my_mapping.type", "mapping") + .putArray("index.analysis.char_filter.my_mapping.mappings", "PH=>F") + .put("index.analysis.analyzer.test_analyzer.type", "custom") + .put("index.analysis.analyzer.test_analyzer.position_increment_gap", "100") + .put("index.analysis.analyzer.test_analyzer.tokenizer", "standard") + .putArray("index.analysis.analyzer.test_analyzer.char_filter", "my_mapping") + .putArray("index.analysis.analyzer.test_analyzer.filter", "snowball", "lowercase"))); + ensureGreen(); + + client().admin().indices().preparePutMapping("test") + .setType("document").setSource("simple", "type=string,analyzer=simple,position_increment_gap=100").get(); + + //only analyzer = + String[] texts = new String[]{"this is a PHISH", "the troubled text"}; + AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze().setIndex(indexOrAlias()).setText(texts) + .setExplain(true).setAnalyzer("test_analyzer").setText(texts).execute().get(); + + // charfilter + assertThat(analyzeResponse.detail().charfilters().length, equalTo(1)); + assertThat(analyzeResponse.detail().charfilters()[0].getName(), equalTo("my_mapping")); + assertThat(analyzeResponse.detail().charfilters()[0].getTexts().length, equalTo(2)); + assertThat(analyzeResponse.detail().charfilters()[0].getTexts()[0], equalTo("this is a FISH")); + 
assertThat(analyzeResponse.detail().charfilters()[0].getTexts()[1], equalTo("the troubled text")); + + // tokenizer + assertThat(analyzeResponse.detail().tokenizer().getName(), equalTo("standard")); + assertThat(analyzeResponse.detail().tokenizer().getTokens().length, equalTo(7)); + AnalyzeResponse.AnalyzeToken token = analyzeResponse.detail().tokenizer().getTokens()[3]; + + assertThat(token.getTerm(), equalTo("FISH")); + assertThat(token.getPosition(), equalTo(3)); + assertThat(token.getStartOffset(), equalTo(10)); + assertThat(token.getEndOffset(), equalTo(15)); + + token = analyzeResponse.detail().tokenizer().getTokens()[5]; + assertThat(token.getTerm(), equalTo("troubled")); + assertThat(token.getPosition(), equalTo(105)); + assertThat(token.getStartOffset(), equalTo(20)); + assertThat(token.getEndOffset(), equalTo(28)); + + // tokenfilter(snowball) + assertThat(analyzeResponse.detail().tokenfilters().length, equalTo(2)); + assertThat(analyzeResponse.detail().tokenfilters()[0].getName(), equalTo("snowball")); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens().length, equalTo(7)); + token = analyzeResponse.detail().tokenfilters()[0].getTokens()[3]; + + assertThat(token.getTerm(), equalTo("FISH")); + assertThat(token.getPosition(), equalTo(3)); + assertThat(token.getStartOffset(), equalTo(10)); + assertThat(token.getEndOffset(), equalTo(15)); + + token = analyzeResponse.detail().tokenfilters()[0].getTokens()[5]; + assertThat(token.getTerm(), equalTo("troubl")); + assertThat(token.getPosition(), equalTo(105)); + assertThat(token.getStartOffset(), equalTo(20)); + assertThat(token.getEndOffset(), equalTo(28)); + + // tokenfilter(lowercase) + assertThat(analyzeResponse.detail().tokenfilters()[1].getName(), equalTo("lowercase")); + assertThat(analyzeResponse.detail().tokenfilters()[1].getTokens().length, equalTo(7)); + token = analyzeResponse.detail().tokenfilters()[1].getTokens()[3]; + + assertThat(token.getTerm(), equalTo("fish")); + 
assertThat(token.getPosition(), equalTo(3)); + assertThat(token.getStartOffset(), equalTo(10)); + assertThat(token.getEndOffset(), equalTo(15)); + + token = analyzeResponse.detail().tokenfilters()[0].getTokens()[5]; + assertThat(token.getTerm(), equalTo("troubl")); + assertThat(token.getPosition(), equalTo(105)); + assertThat(token.getStartOffset(), equalTo(20)); + assertThat(token.getEndOffset(), equalTo(28)); + + + } + } diff --git a/docs/reference/indices/analyze.asciidoc b/docs/reference/indices/analyze.asciidoc index 1a256a6330a..1e8cd77ef09 100644 --- a/docs/reference/indices/analyze.asciidoc +++ b/docs/reference/indices/analyze.asciidoc @@ -100,3 +100,74 @@ provided it doesn't start with `{` : -------------------------------------------------- curl -XGET 'localhost:9200/_analyze?tokenizer=keyword&token_filters=lowercase&char_filters=html_strip' -d 'this is a test' -------------------------------------------------- + +=== Explain Analyze + +If you want to get more advanced details, set `explain` to `true` (defaults to `false`). It will output all token attributes for each token. +You can filter token attributes you want to output by setting `attributes` option. 
+
+experimental[The format of the additional detail information is experimental and can change at any time]
+
+[source,js]
+--------------------------------------------------
+GET test/_analyze
+{
+  "tokenizer" : "standard",
+  "token_filters" : ["snowball"],
+  "text" : "detailed output",
+  "explain" : true,
+  "attributes" : ["keyword"] <1>
+}
+--------------------------------------------------
+// AUTOSENSE
+<1> Set "keyword" to output only the "keyword" attribute
+
+coming[2.0.0, body based parameters were added in 2.0.0]
+
+The request returns the following result:
+
+[source,js]
+--------------------------------------------------
+{
+  "detail" : {
+    "custom_analyzer" : true,
+    "charfilters" : [ ],
+    "tokenizer" : {
+      "name" : "standard",
+      "tokens" : [ {
+        "token" : "detailed",
+        "start_offset" : 0,
+        "end_offset" : 8,
+        "type" : "",
+        "position" : 0
+      }, {
+        "token" : "output",
+        "start_offset" : 9,
+        "end_offset" : 15,
+        "type" : "",
+        "position" : 1
+      } ]
+    },
+    "tokenfilters" : [ {
+      "name" : "snowball",
+      "tokens" : [ {
+        "token" : "detail",
+        "start_offset" : 0,
+        "end_offset" : 8,
+        "type" : "",
+        "position" : 0,
+        "keyword" : false <1>
+      }, {
+        "token" : "output",
+        "start_offset" : 9,
+        "end_offset" : 15,
+        "type" : "",
+        "position" : 1,
+        "keyword" : false <1>
+      } ]
+    } ]
+  }
+}
+--------------------------------------------------
+<1> Only the "keyword" attribute is output, since "attributes" was specified in the request.
+
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.analyze.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.analyze.json
index 00b0ec13a5c..9fe9bfe3cad 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.analyze.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.analyze.json
@@ -44,6 +44,14 @@
         "type" : "string",
         "description" : "The name of the tokenizer to use for the analysis"
       },
+      "detail": {
+        "type" : "boolean",
+        "description" : "With `true`, outputs more advanced details. (default: false)"
+      },
+      "attributes": {
+        "type" : "list",
+        "description" : "A comma-separated list of token attributes to output, this parameter works only with `detail=true`"
+      },
       "format": {
         "type": "enum",
         "options" : ["detailed","text"],
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml
index 49420672861..0b1a090303e 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml
@@ -71,3 +71,31 @@ setup:
     - length: {tokens: 2 }
     - match: { tokens.0.token: foo bar }
     - match: { tokens.1.token: baz }
+---
+"Detail response with Analyzer":
+  - do:
+      indices.analyze:
+        body: {"text": "This is troubled", "analyzer": standard, "explain": true}
+  - length: { detail.analyzer.tokens: 3 }
+  - match: { detail.analyzer.name: standard }
+  - match: { detail.analyzer.tokens.0.token: this }
+  - match: { detail.analyzer.tokens.1.token: is }
+  - match: { detail.analyzer.tokens.2.token: troubled }
+---
+"Detail output specified attribute":
+  - do:
+      indices.analyze:
+        body: {"text": "This is troubled", "char_filters": ["html_strip"], "filters": ["snowball"], "tokenizer": standard, "explain": true, "attributes": ["keyword"]}
+  - length: { detail.charfilters: 1 }
+  - length: { 
detail.tokenizer.tokens: 3 }
+  - length: { detail.tokenfilters.0.tokens: 3 }
+  - match: { detail.tokenizer.name: standard }
+  - match: { detail.tokenizer.tokens.0.token: This }
+  - match: { detail.tokenizer.tokens.1.token: is }
+  - match: { detail.tokenizer.tokens.2.token: troubled }
+  - match: { detail.tokenfilters.0.name: snowball }
+  - match: { detail.tokenfilters.0.tokens.0.token: This }
+  - match: { detail.tokenfilters.0.tokens.1.token: is }
+  - match: { detail.tokenfilters.0.tokens.2.token: troubl }
+  - match: { detail.tokenfilters.0.tokens.2.keyword: false }
+

From fafeb3abddd8e691e966064bfd3131714e20d8a2 Mon Sep 17 00:00:00 2001
From: Boaz Leskes
Date: Wed, 9 Dec 2015 12:36:50 +0100
Subject: [PATCH 57/57] Introduce a common base response class to all single
 doc write ops

IndexResponse, DeleteResponse and UpdateResponse share some logic. This can
be unified to a single DocWriteResponse base class. Additionally, some
replication actions are no longer about write operations, so this commit
renames ActionWriteResponse to ReplicationResponse.

Lastly, some toXContent logic is moved from the Rest layer to the actual
response classes, for more code re-sharing.
Closes #15334 --- .../action/DocWriteResponse.java | 130 ++++++++++++++++++ ...Response.java => ReplicationResponse.java} | 9 +- .../indices/flush/TransportFlushAction.java | 8 +- .../flush/TransportShardFlushAction.java | 12 +- .../refresh/TransportRefreshAction.java | 8 +- .../refresh/TransportShardRefreshAction.java | 12 +- .../action/bulk/BulkItemResponse.java | 82 ++++++----- .../action/bulk/BulkShardResponse.java | 4 +- .../action/bulk/TransportShardBulkAction.java | 4 +- .../action/delete/DeleteResponse.java | 86 ++++++------ .../action/delete/TransportDeleteAction.java | 2 +- .../action/index/IndexResponse.java | 82 +++++------ .../action/index/TransportIndexAction.java | 2 +- .../TransportBroadcastReplicationAction.java | 19 ++- .../TransportReplicationAction.java | 20 +-- .../action/update/TransportUpdateAction.java | 6 +- .../action/update/UpdateHelper.java | 11 +- .../action/update/UpdateResponse.java | 97 +++++++------ .../rest/action/bulk/RestBulkAction.java | 60 +------- .../rest/action/delete/RestDeleteAction.java | 31 +---- .../rest/action/index/RestIndexAction.java | 32 +---- .../rest/action/update/RestUpdateAction.java | 38 +---- .../BroadcastReplicationTests.java | 36 ++--- .../TransportReplicationActionTests.java | 6 +- .../elasticsearch/document/ShardInfoIT.java | 6 +- .../TransportDeleteByQueryActionTests.java | 5 +- 26 files changed, 385 insertions(+), 423 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/action/DocWriteResponse.java rename core/src/main/java/org/elasticsearch/action/{ActionWriteResponse.java => ReplicationResponse.java} (96%) diff --git a/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java b/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java new file mode 100644 index 00000000000..009d3fc47a9 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/DocWriteResponse.java @@ -0,0 +1,130 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license 
agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.action; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.StatusToXContent; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; + +/** + * A base class for the response of a write operation that involves a single doc + */ +public abstract class DocWriteResponse extends ReplicationResponse implements StatusToXContent { + + private ShardId shardId; + private String id; + private String type; + private long version; + + public DocWriteResponse(ShardId shardId, String type, String id, long version) { + this.shardId = shardId; + this.type = type; + this.id = id; + this.version = version; + } + + // needed for deserialization + protected DocWriteResponse() { + } + + /** + * The index the document was changed in. + */ + public String getIndex() { + return this.shardId.getIndex(); + } + + + /** + * The exact shard the document was changed in. 
+ */ + public ShardId getShardId() { + return this.shardId; + } + + /** + * The type of the document changed. + */ + public String getType() { + return this.type; + } + + /** + * The id of the document changed. + */ + public String getId() { + return this.id; + } + + /** + * Returns the current version of the doc. + */ + public long getVersion() { + return this.version; + } + + /** returns the rest status for this response (based on {@link ShardInfo#status()} */ + public RestStatus status() { + return getShardInfo().status(); + } + + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + shardId = ShardId.readShardId(in); + type = in.readString(); + id = in.readString(); + version = in.readZLong(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + shardId.writeTo(out); + out.writeString(type); + out.writeString(id); + out.writeZLong(version); + } + + static final class Fields { + static final XContentBuilderString _INDEX = new XContentBuilderString("_index"); + static final XContentBuilderString _TYPE = new XContentBuilderString("_type"); + static final XContentBuilderString _ID = new XContentBuilderString("_id"); + static final XContentBuilderString _VERSION = new XContentBuilderString("_version"); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + ReplicationResponse.ShardInfo shardInfo = getShardInfo(); + builder.field(Fields._INDEX, shardId.getIndex()) + .field(Fields._TYPE, type) + .field(Fields._ID, id) + .field(Fields._VERSION, version); + shardInfo.toXContent(builder, params); + return builder; + } +} diff --git a/core/src/main/java/org/elasticsearch/action/ActionWriteResponse.java b/core/src/main/java/org/elasticsearch/action/ReplicationResponse.java similarity index 96% rename from core/src/main/java/org/elasticsearch/action/ActionWriteResponse.java rename to 
core/src/main/java/org/elasticsearch/action/ReplicationResponse.java index f4152ac85e4..4e358c8d42a 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionWriteResponse.java +++ b/core/src/main/java/org/elasticsearch/action/ReplicationResponse.java @@ -21,7 +21,6 @@ package org.elasticsearch.action; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.bootstrap.Elasticsearch; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -30,25 +29,23 @@ import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilderString; -import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.rest.RestStatus; import java.io.IOException; -import java.util.Collections; /** * Base class for write action responses. 
*/ -public class ActionWriteResponse extends ActionResponse { +public class ReplicationResponse extends ActionResponse { - public final static ActionWriteResponse.ShardInfo.Failure[] EMPTY = new ActionWriteResponse.ShardInfo.Failure[0]; + public final static ReplicationResponse.ShardInfo.Failure[] EMPTY = new ReplicationResponse.ShardInfo.Failure[0]; private ShardInfo shardInfo; @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - shardInfo = ActionWriteResponse.ShardInfo.readShardInfo(in); + shardInfo = ReplicationResponse.ShardInfo.readShardInfo(in); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java index 00e03ffdf6e..d2a8f1abcbf 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.indices.flush; -import org.elasticsearch.action.ActionWriteResponse; +import org.elasticsearch.action.ReplicationResponse; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.replication.TransportBroadcastReplicationAction; @@ -36,7 +36,7 @@ import java.util.List; /** * Flush Action. 
*/ -public class TransportFlushAction extends TransportBroadcastReplicationAction { +public class TransportFlushAction extends TransportBroadcastReplicationAction { @Inject public TransportFlushAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, @@ -47,8 +47,8 @@ public class TransportFlushAction extends TransportBroadcastReplicationAction { +public class TransportShardFlushAction extends TransportReplicationAction { public static final String NAME = FlushAction.NAME + "[s]"; @@ -53,16 +53,16 @@ public class TransportShardFlushAction extends TransportReplicationAction shardOperationOnPrimary(MetaData metaData, ShardFlushRequest shardRequest) throws Throwable { + protected Tuple shardOperationOnPrimary(MetaData metaData, ShardFlushRequest shardRequest) throws Throwable { IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId().getIndex()).getShard(shardRequest.shardId().id()); indexShard.flush(shardRequest.getRequest()); logger.trace("{} flush request executed on primary", indexShard.shardId()); - return new Tuple<>(new ActionWriteResponse(), shardRequest); + return new Tuple<>(new ReplicationResponse(), shardRequest); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java index a5e30abc12d..a76b714b31d 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.indices.refresh; -import org.elasticsearch.action.ActionWriteResponse; +import org.elasticsearch.action.ReplicationResponse; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import 
org.elasticsearch.action.support.replication.ReplicationRequest; @@ -37,7 +37,7 @@ import java.util.List; /** * Refresh action. */ -public class TransportRefreshAction extends TransportBroadcastReplicationAction { +public class TransportRefreshAction extends TransportBroadcastReplicationAction { @Inject public TransportRefreshAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, @@ -48,8 +48,8 @@ public class TransportRefreshAction extends TransportBroadcastReplicationAction< } @Override - protected ActionWriteResponse newShardResponse() { - return new ActionWriteResponse(); + protected ReplicationResponse newShardResponse() { + return new ReplicationResponse(); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java index 02af4ad89a0..c78977fb362 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.indices.refresh; -import org.elasticsearch.action.ActionWriteResponse; +import org.elasticsearch.action.ReplicationResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.replication.ReplicationRequest; import org.elasticsearch.action.support.replication.TransportReplicationAction; @@ -41,7 +41,7 @@ import org.elasticsearch.transport.TransportService; /** * */ -public class TransportShardRefreshAction extends TransportReplicationAction { +public class TransportShardRefreshAction extends TransportReplicationAction { public static final String NAME = RefreshAction.NAME + "[s]"; @@ -55,16 +55,16 @@ public class TransportShardRefreshAction extends TransportReplicationAction shardOperationOnPrimary(MetaData metaData, 
ReplicationRequest shardRequest) throws Throwable { + protected Tuple shardOperationOnPrimary(MetaData metaData, ReplicationRequest shardRequest) throws Throwable { IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId().getIndex()).getShard(shardRequest.shardId().id()); indexShard.refresh("api"); logger.trace("{} refresh request executed on primary", indexShard.shardId()); - return new Tuple<>(new ActionWriteResponse(), shardRequest); + return new Tuple<>(new ReplicationResponse(), shardRequest); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java index 80e86eaaf17..982700016b7 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java @@ -19,14 +19,18 @@ package org.elasticsearch.action.bulk; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.action.ActionWriteResponse; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.xcontent.StatusToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -35,7 +39,39 @@ import java.io.IOException; * Represents a single item response for an action executed as part of the bulk API. Holds the index/type/id * of the relevant action, and if it has failed or not (with the failure message incase it failed). 
*/ -public class BulkItemResponse implements Streamable { +public class BulkItemResponse implements Streamable, StatusToXContent { + + @Override + public RestStatus status() { + return failure == null ? response.status() : failure.getStatus(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(opType); + if (failure == null) { + response.toXContent(builder, params); + builder.field(Fields.STATUS, response.status()); + } else { + builder.field(Fields._INDEX, failure.getIndex()); + builder.field(Fields._TYPE, failure.getType()); + builder.field(Fields._ID, failure.getId()); + builder.field(Fields.STATUS, failure.getStatus()); + builder.startObject(Fields.ERROR); + ElasticsearchException.toXContent(builder, params, failure.getCause()); + builder.endObject(); + } + builder.endObject(); + return builder; + } + + static final class Fields { + static final XContentBuilderString _INDEX = new XContentBuilderString("_index"); + static final XContentBuilderString _TYPE = new XContentBuilderString("_type"); + static final XContentBuilderString _ID = new XContentBuilderString("_id"); + static final XContentBuilderString STATUS = new XContentBuilderString("status"); + static final XContentBuilderString ERROR = new XContentBuilderString("error"); + } /** * Represents a failure. 
@@ -99,7 +135,7 @@ public class BulkItemResponse implements Streamable { private String opType; - private ActionWriteResponse response; + private DocWriteResponse response; private Failure failure; @@ -107,7 +143,7 @@ public class BulkItemResponse implements Streamable { } - public BulkItemResponse(int id, String opType, ActionWriteResponse response) { + public BulkItemResponse(int id, String opType, DocWriteResponse response) { this.id = id; this.opType = opType; this.response = response; @@ -140,14 +176,7 @@ public class BulkItemResponse implements Streamable { if (failure != null) { return failure.getIndex(); } - if (response instanceof IndexResponse) { - return ((IndexResponse) response).getIndex(); - } else if (response instanceof DeleteResponse) { - return ((DeleteResponse) response).getIndex(); - } else if (response instanceof UpdateResponse) { - return ((UpdateResponse) response).getIndex(); - } - return null; + return response.getIndex(); } /** @@ -157,14 +186,7 @@ public class BulkItemResponse implements Streamable { if (failure != null) { return failure.getType(); } - if (response instanceof IndexResponse) { - return ((IndexResponse) response).getType(); - } else if (response instanceof DeleteResponse) { - return ((DeleteResponse) response).getType(); - } else if (response instanceof UpdateResponse) { - return ((UpdateResponse) response).getType(); - } - return null; + return response.getType(); } /** @@ -174,14 +196,7 @@ public class BulkItemResponse implements Streamable { if (failure != null) { return failure.getId(); } - if (response instanceof IndexResponse) { - return ((IndexResponse) response).getId(); - } else if (response instanceof DeleteResponse) { - return ((DeleteResponse) response).getId(); - } else if (response instanceof UpdateResponse) { - return ((UpdateResponse) response).getId(); - } - return null; + return response.getId(); } /** @@ -191,21 +206,14 @@ public class BulkItemResponse implements Streamable { if (failure != null) { return 
-1; } - if (response instanceof IndexResponse) { - return ((IndexResponse) response).getVersion(); - } else if (response instanceof DeleteResponse) { - return ((DeleteResponse) response).getVersion(); - } else if (response instanceof UpdateResponse) { - return ((UpdateResponse) response).getVersion(); - } - return -1; + return response.getVersion(); } /** * The actual response ({@link IndexResponse} or {@link DeleteResponse}). null in * case of failure. */ - public T getResponse() { + public T getResponse() { return (T) response; } diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkShardResponse.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkShardResponse.java index 6b08627f5de..76c80a9b064 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkShardResponse.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkShardResponse.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.bulk; -import org.elasticsearch.action.ActionWriteResponse; +import org.elasticsearch.action.ReplicationResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; @@ -29,7 +29,7 @@ import java.io.IOException; /** * */ -public class BulkShardResponse extends ActionWriteResponse { +public class BulkShardResponse extends ReplicationResponse { private ShardId shardId; private BulkItemResponse[] responses; diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index 2cc81556222..2597695a1e2 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -204,7 +204,7 @@ public class TransportShardBulkAction extends TransportReplicationAction 0) { Tuple> sourceAndContent = 
XContentHelper.convertToMap(indexSourceAsBytes, true); updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, request.index(), indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes)); @@ -216,7 +216,7 @@ public class TransportShardBulkAction extends TransportReplicationAction writeResult = updateResult.writeResult; DeleteResponse response = writeResult.response(); DeleteRequest deleteRequest = updateResult.request(); - updateResponse = new UpdateResponse(response.getShardInfo(), response.getIndex(), response.getType(), response.getId(), response.getVersion(), false); + updateResponse = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), false); updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, request.index(), response.getVersion(), updateResult.result.updatedSourceAsMap(), updateResult.result.updateSourceContentType(), null)); // Replace the update request to the translated delete request to execute on the replica. 
item = request.items()[requestIndex] = new BulkItemRequest(request.items()[requestIndex].id(), deleteRequest); diff --git a/core/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java b/core/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java index 26cfa57a13d..57781547266 100644 --- a/core/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java +++ b/core/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java @@ -19,9 +19,13 @@ package org.elasticsearch.action.delete; -import org.elasticsearch.action.ActionWriteResponse; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -31,53 +35,19 @@ import java.io.IOException; * @see org.elasticsearch.action.delete.DeleteRequest * @see org.elasticsearch.client.Client#delete(DeleteRequest) */ -public class DeleteResponse extends ActionWriteResponse { +public class DeleteResponse extends DocWriteResponse { - private String index; - private String id; - private String type; - private long version; private boolean found; public DeleteResponse() { } - public DeleteResponse(String index, String type, String id, long version, boolean found) { - this.index = index; - this.id = id; - this.type = type; - this.version = version; + public DeleteResponse(ShardId shardId, String type, String id, long version, boolean found) { + super(shardId, type, id, version); this.found = found; } - /** - * The index the document was deleted from. - */ - public String getIndex() { - return this.index; - } - - /** - * The type of the document deleted. - */ - public String getType() { - return this.type; - } - - /** - * The id of the document deleted. 
- */ - public String getId() { - return this.id; - } - - /** - * The version of the delete operation. - */ - public long getVersion() { - return this.version; - } /** * Returns true if a doc was found to delete. @@ -89,20 +59,44 @@ public class DeleteResponse extends ActionWriteResponse { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - index = in.readString(); - type = in.readString(); - id = in.readString(); - version = in.readLong(); found = in.readBoolean(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeString(index); - out.writeString(type); - out.writeString(id); - out.writeLong(version); out.writeBoolean(found); } + + @Override + public RestStatus status() { + if (found == false) { + return RestStatus.NOT_FOUND; + } + return super.status(); + } + + static final class Fields { + static final XContentBuilderString FOUND = new XContentBuilderString("found"); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(Fields.FOUND, isFound()); + super.toXContent(builder, params); + return builder; + } + + @Override + public String toString() { + StringBuilder builder = new StringBuilder(); + builder.append("DeleteResponse["); + builder.append("index=").append(getIndex()); + builder.append(",type=").append(getType()); + builder.append(",id=").append(getId()); + builder.append(",version=").append(getVersion()); + builder.append(",found=").append(found); + builder.append(",shards=").append(getShardInfo()); + return builder.append("]").toString(); + } } diff --git a/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java b/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java index 1b3faec9999..ca66b285753 100644 --- a/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java +++ 
b/core/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java @@ -140,7 +140,7 @@ public class TransportDeleteAction extends TransportReplicationAction( - new DeleteResponse(indexShard.shardId().getIndex(), request.type(), request.id(), delete.version(), delete.found()), + new DeleteResponse(indexShard.shardId(), request.type(), request.id(), delete.version(), delete.found()), delete.getTranslogLocation()); } diff --git a/core/src/main/java/org/elasticsearch/action/index/IndexResponse.java b/core/src/main/java/org/elasticsearch/action/index/IndexResponse.java index 5727b2b673b..665327a749f 100644 --- a/core/src/main/java/org/elasticsearch/action/index/IndexResponse.java +++ b/core/src/main/java/org/elasticsearch/action/index/IndexResponse.java @@ -19,9 +19,13 @@ package org.elasticsearch.action.index; -import org.elasticsearch.action.ActionWriteResponse; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -31,54 +35,19 @@ import java.io.IOException; * @see org.elasticsearch.action.index.IndexRequest * @see org.elasticsearch.client.Client#index(IndexRequest) */ -public class IndexResponse extends ActionWriteResponse { +public class IndexResponse extends DocWriteResponse { - private String index; - private String id; - private String type; - private long version; private boolean created; public IndexResponse() { } - public IndexResponse(String index, String type, String id, long version, boolean created) { - this.index = index; - this.id = id; - this.type = type; - this.version = version; + public IndexResponse(ShardId shardId, String type, String id, long version, boolean created) { + super(shardId, 
type, id, version); this.created = created; } - /** - * The index the document was indexed into. - */ - public String getIndex() { - return this.index; - } - - /** - * The type of the document indexed. - */ - public String getType() { - return this.type; - } - - /** - * The id of the document indexed. - */ - public String getId() { - return this.id; - } - - /** - * Returns the current version of the doc indexed. - */ - public long getVersion() { - return this.version; - } - /** * Returns true if the document was created, false if updated. */ @@ -86,23 +55,23 @@ public class IndexResponse extends ActionWriteResponse { return this.created; } + @Override + public RestStatus status() { + if (created) { + return RestStatus.CREATED; + } + return super.status(); + } + @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - index = in.readString(); - type = in.readString(); - id = in.readString(); - version = in.readLong(); created = in.readBoolean(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeString(index); - out.writeString(type); - out.writeString(id); - out.writeLong(version); out.writeBoolean(created); } @@ -110,12 +79,23 @@ public class IndexResponse extends ActionWriteResponse { public String toString() { StringBuilder builder = new StringBuilder(); builder.append("IndexResponse["); - builder.append("index=").append(index); - builder.append(",type=").append(type); - builder.append(",id=").append(id); - builder.append(",version=").append(version); + builder.append("index=").append(getIndex()); + builder.append(",type=").append(getType()); + builder.append(",id=").append(getId()); + builder.append(",version=").append(getVersion()); builder.append(",created=").append(created); builder.append(",shards=").append(getShardInfo()); return builder.append("]").toString(); } + + static final class Fields { + static final XContentBuilderString CREATED = new 
XContentBuilderString("created"); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + super.toXContent(builder, params); + builder.field(Fields.CREATED, isCreated()); + return builder; + } } diff --git a/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java b/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java index 37e76835270..620056ded4e 100644 --- a/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java @@ -222,7 +222,7 @@ public class TransportIndexAction extends TransportReplicationAction(new IndexResponse(shardId.getIndex(), request.type(), request.id(), request.version(), created), operation.getTranslogLocation()); + return new WriteResult<>(new IndexResponse(shardId, request.type(), request.id(), request.version(), created), operation.getTranslogLocation()); } } diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java index ddd4d42f7a6..33a9d349e80 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java @@ -22,9 +22,8 @@ package org.elasticsearch.action.support.replication; import com.carrotsearch.hppc.cursors.IntObjectCursor; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionWriteResponse; +import org.elasticsearch.action.ReplicationResponse; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.action.support.ActionFilters; import 
org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.HandledTransportAction; @@ -53,7 +52,7 @@ import java.util.function.Supplier; * Base class for requests that should be executed on all shards of an index or several indices. * This action sends shard requests to all primary shards of the indices and they are then replicated like write requests */ -public abstract class TransportBroadcastReplicationAction extends HandledTransportAction { +public abstract class TransportBroadcastReplicationAction extends HandledTransportAction { private final TransportReplicationAction replicatedBroadcastShardAction; private final ClusterService clusterService; @@ -91,15 +90,15 @@ public abstract class TransportBroadcastReplicationAction shardFailures = null; for (int i = 0; i < shardsResponses.size(); i++) { - ActionWriteResponse shardResponse = shardsResponses.get(i); + ReplicationResponse shardResponse = shardsResponses.get(i); if (shardResponse == null) { // non active shard, ignore } else { @@ -152,7 +151,7 @@ public abstract class TransportBroadcastReplicationAction(); } - for (ActionWriteResponse.ShardInfo.Failure failure : shardResponse.getShardInfo().getFailures()) { + for (ReplicationResponse.ShardInfo.Failure failure : shardResponse.getShardInfo().getFailures()) { shardFailures.add(new DefaultShardOperationFailedException(new BroadcastShardOperationFailedException(new ShardId(failure.index(), failure.shardId()), failure.getCause()))); } } diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 2f9fd6d483c..26c439c0a3d 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -22,7 +22,7 @@ package 
org.elasticsearch.action.support.replication; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionWriteResponse; +import org.elasticsearch.action.ReplicationResponse; import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.support.ActionFilters; @@ -78,7 +78,7 @@ import java.util.function.Supplier; * primary node to validate request before primary operation followed by sampling state again for resolving * nodes with replica copies to perform replication. */ -public abstract class TransportReplicationAction extends TransportAction { +public abstract class TransportReplicationAction extends TransportAction { public static final String SHARD_FAILURE_TIMEOUT = "action.support.replication.shard.failure_timeout"; @@ -214,7 +214,7 @@ public abstract class TransportReplicationAction { + protected static class WriteResult { public final T response; public final Translog.Location location; @@ -225,10 +225,10 @@ public abstract class TransportReplicationAction T response() { + public T response() { // this sets total, pending and failed to 0 and this is ok, because we will embed this into the replica // request and not use it - response.setShardInfo(new ActionWriteResponse.ShardInfo()); + response.setShardInfo(new ReplicationResponse.ShardInfo()); return (T) response; } @@ -908,20 +908,20 @@ public abstract class TransportReplicationAction entry : shardReplicaFailures.entrySet()) { RestStatus restStatus = ExceptionsHelper.status(entry.getValue()); - failuresArray[slot++] = new ActionWriteResponse.ShardInfo.Failure( + failuresArray[slot++] = new ReplicationResponse.ShardInfo.Failure( shardId.getIndex(), shardId.getId(), entry.getKey(), entry.getValue(), restStatus, false ); } } else { - failuresArray = ActionWriteResponse.EMPTY; + failuresArray = 
ReplicationResponse.EMPTY; } - finalResponse.setShardInfo(new ActionWriteResponse.ShardInfo( + finalResponse.setShardInfo(new ReplicationResponse.ShardInfo( totalShards, success.get(), failuresArray diff --git a/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java b/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java index b2d24fef714..e5edc1af96b 100644 --- a/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java +++ b/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java @@ -175,7 +175,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio indexAction.execute(upsertRequest, new ActionListener() { @Override public void onResponse(IndexResponse response) { - UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getIndex(), response.getType(), response.getId(), response.getVersion(), response.isCreated()); + UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.isCreated()); if (request.fields() != null && request.fields().length > 0) { Tuple> sourceAndContent = XContentHelper.convertToMap(upsertSourceBytes, true); update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes)); @@ -212,7 +212,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio indexAction.execute(indexRequest, new ActionListener() { @Override public void onResponse(IndexResponse response) { - UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getIndex(), response.getType(), response.getId(), response.getVersion(), response.isCreated()); + UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), 
response.getVersion(), response.isCreated()); update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), indexSourceBytes)); listener.onResponse(update); } @@ -240,7 +240,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio deleteAction.execute(deleteRequest, new ActionListener() { @Override public void onResponse(DeleteResponse response) { - UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getIndex(), response.getType(), response.getId(), response.getVersion(), false); + UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), false); update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), null)); listener.onResponse(update); } diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index 4bdcd43023f..9f8b2a2e7be 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -83,9 +83,10 @@ public class UpdateHelper extends AbstractComponent { @SuppressWarnings("unchecked") protected Result prepare(UpdateRequest request, final GetResult getResult) { long getDateNS = System.nanoTime(); + final ShardId shardId = new ShardId(getResult.getIndex(), request.shardId()); if (!getResult.isExists()) { if (request.upsertRequest() == null && !request.docAsUpsert()) { - throw new DocumentMissingException(new ShardId(request.index(), request.shardId()), request.type(), request.id()); + throw new DocumentMissingException(shardId, request.type(), request.id()); } IndexRequest indexRequest = request.docAsUpsert() ? 
request.doc() : request.upsertRequest(); TimeValue ttl = indexRequest.ttl(); @@ -113,7 +114,7 @@ public class UpdateHelper extends AbstractComponent { logger.warn("Used upsert operation [{}] for script [{}], doing nothing...", scriptOpChoice, request.script.getScript()); } - UpdateResponse update = new UpdateResponse(getResult.getIndex(), getResult.getType(), getResult.getId(), + UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), false); update.setGetResult(getResult); return new Result(update, Operation.NONE, upsertDoc, XContentType.JSON); @@ -145,7 +146,7 @@ public class UpdateHelper extends AbstractComponent { if (getResult.internalSourceRef() == null) { // no source, we can't do nothing, through a failure... - throw new DocumentSourceMissingException(new ShardId(request.index(), request.shardId()), request.type(), request.id()); + throw new DocumentSourceMissingException(shardId, request.type(), request.id()); } Tuple> sourceAndContent = XContentHelper.convertToMap(getResult.internalSourceRef(), true); @@ -231,12 +232,12 @@ public class UpdateHelper extends AbstractComponent { .consistencyLevel(request.consistencyLevel()); return new Result(deleteRequest, Operation.DELETE, updatedSourceAsMap, updateSourceContentType); } else if ("none".equals(operation)) { - UpdateResponse update = new UpdateResponse(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), false); + UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), false); update.setGetResult(extractGetResult(request, request.index(), getResult.getVersion(), updatedSourceAsMap, updateSourceContentType, getResult.internalSourceRef())); return new Result(update, Operation.NONE, updatedSourceAsMap, updateSourceContentType); } else { logger.warn("Used update operation [{}] for script [{}], doing nothing...", operation, request.script.getScript()); - 
UpdateResponse update = new UpdateResponse(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), false); + UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), false); return new Result(update, Operation.NONE, updatedSourceAsMap, updateSourceContentType); } } diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateResponse.java b/core/src/main/java/org/elasticsearch/action/update/UpdateResponse.java index af6438097c8..2f3146b0644 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateResponse.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateResponse.java @@ -19,21 +19,21 @@ package org.elasticsearch.action.update; -import org.elasticsearch.action.ActionWriteResponse; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.get.GetResult; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.rest.RestStatus; import java.io.IOException; /** */ -public class UpdateResponse extends ActionWriteResponse { +public class UpdateResponse extends DocWriteResponse { - private String index; - private String id; - private String type; - private long version; private boolean created; private GetResult getResult; @@ -44,47 +44,16 @@ public class UpdateResponse extends ActionWriteResponse { * Constructor to be used when a update didn't translate in a write. 
* For example: update script with operation set to none */ - public UpdateResponse(String index, String type, String id, long version, boolean created) { - this(new ShardInfo(0, 0), index, type, id, version, created); + public UpdateResponse(ShardId shardId, String type, String id, long version, boolean created) { + this(new ShardInfo(0, 0), shardId, type, id, version, created); } - public UpdateResponse(ShardInfo shardInfo, String index, String type, String id, long version, boolean created) { + public UpdateResponse(ShardInfo shardInfo, ShardId shardId, String type, String id, long version, boolean created) { + super(shardId, type, id, version); setShardInfo(shardInfo); - this.index = index; - this.id = id; - this.type = type; - this.version = version; this.created = created; } - /** - * The index the document was indexed into. - */ - public String getIndex() { - return this.index; - } - - /** - * The type of the document indexed. - */ - public String getType() { - return this.type; - } - - /** - * The id of the document indexed. - */ - public String getId() { - return this.id; - } - - /** - * Returns the current version of the doc indexed. 
- */ - public long getVersion() { - return this.version; - } - public void setGetResult(GetResult getResult) { this.getResult = getResult; } @@ -101,13 +70,17 @@ public class UpdateResponse extends ActionWriteResponse { } + @Override + public RestStatus status() { + if (created) { + return RestStatus.CREATED; + } + return super.status(); + } + @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - index = in.readString(); - type = in.readString(); - id = in.readString(); - version = in.readLong(); created = in.readBoolean(); if (in.readBoolean()) { getResult = GetResult.readGetResult(in); @@ -117,10 +90,6 @@ public class UpdateResponse extends ActionWriteResponse { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeString(index); - out.writeString(type); - out.writeString(id); - out.writeLong(version); out.writeBoolean(created); if (getResult == null) { out.writeBoolean(false); @@ -129,4 +98,34 @@ public class UpdateResponse extends ActionWriteResponse { getResult.writeTo(out); } } + + + static final class Fields { + static final XContentBuilderString GET = new XContentBuilderString("get"); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + super.toXContent(builder, params); + if (getGetResult() != null) { + builder.startObject(Fields.GET); + getGetResult().toXContentEmbedded(builder, params); + builder.endObject(); + } + return builder; + } + + @Override + public String toString() { + StringBuilder builder = new StringBuilder(); + builder.append("UpdateResponse["); + builder.append("index=").append(getIndex()); + builder.append(",type=").append(getType()); + builder.append(",id=").append(getId()); + builder.append(",version=").append(getVersion()); + builder.append(",created=").append(created); + builder.append(",shards=").append(getShardInfo()); + return builder.append("]").toString(); + } + } diff --git 
a/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java b/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java index 90184352714..536b73ba2b5 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/bulk/RestBulkAction.java @@ -19,16 +19,11 @@ package org.elasticsearch.rest.action.bulk; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.ActionWriteResponse; import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.bulk.BulkShardRequest; -import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; import org.elasticsearch.common.Strings; @@ -96,52 +91,7 @@ public class RestBulkAction extends BaseRestHandler { builder.startArray(Fields.ITEMS); for (BulkItemResponse itemResponse : response) { builder.startObject(); - builder.startObject(itemResponse.getOpType()); - builder.field(Fields._INDEX, itemResponse.getIndex()); - builder.field(Fields._TYPE, itemResponse.getType()); - builder.field(Fields._ID, itemResponse.getId()); - long version = itemResponse.getVersion(); - if (version != -1) { - builder.field(Fields._VERSION, itemResponse.getVersion()); - } - if (itemResponse.isFailed()) { - builder.field(Fields.STATUS, itemResponse.getFailure().getStatus().getStatus()); - builder.startObject(Fields.ERROR); - ElasticsearchException.toXContent(builder, request, itemResponse.getFailure().getCause()); - builder.endObject(); - } else { - ActionWriteResponse.ShardInfo shardInfo = itemResponse.getResponse().getShardInfo(); - shardInfo.toXContent(builder, 
request); - if (itemResponse.getResponse() instanceof DeleteResponse) { - DeleteResponse deleteResponse = itemResponse.getResponse(); - if (deleteResponse.isFound()) { - builder.field(Fields.STATUS, shardInfo.status().getStatus()); - } else { - builder.field(Fields.STATUS, RestStatus.NOT_FOUND.getStatus()); - } - builder.field(Fields.FOUND, deleteResponse.isFound()); - } else if (itemResponse.getResponse() instanceof IndexResponse) { - IndexResponse indexResponse = itemResponse.getResponse(); - if (indexResponse.isCreated()) { - builder.field(Fields.STATUS, RestStatus.CREATED.getStatus()); - } else { - builder.field(Fields.STATUS, shardInfo.status().getStatus()); - } - } else if (itemResponse.getResponse() instanceof UpdateResponse) { - UpdateResponse updateResponse = itemResponse.getResponse(); - if (updateResponse.isCreated()) { - builder.field(Fields.STATUS, RestStatus.CREATED.getStatus()); - } else { - builder.field(Fields.STATUS, shardInfo.status().getStatus()); - } - if (updateResponse.getGetResult() != null) { - builder.startObject(Fields.GET); - updateResponse.getGetResult().toXContentEmbedded(builder, request); - builder.endObject(); - } - } - } - builder.endObject(); + itemResponse.toXContent(builder, request); builder.endObject(); } builder.endArray(); @@ -155,15 +105,7 @@ public class RestBulkAction extends BaseRestHandler { static final class Fields { static final XContentBuilderString ITEMS = new XContentBuilderString("items"); static final XContentBuilderString ERRORS = new XContentBuilderString("errors"); - static final XContentBuilderString _INDEX = new XContentBuilderString("_index"); - static final XContentBuilderString _TYPE = new XContentBuilderString("_type"); - static final XContentBuilderString _ID = new XContentBuilderString("_id"); - static final XContentBuilderString STATUS = new XContentBuilderString("status"); - static final XContentBuilderString ERROR = new XContentBuilderString("error"); static final XContentBuilderString TOOK = new 
XContentBuilderString("took"); - static final XContentBuilderString _VERSION = new XContentBuilderString("_version"); - static final XContentBuilderString FOUND = new XContentBuilderString("found"); - static final XContentBuilderString GET = new XContentBuilderString("get"); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java b/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java index 209ab686ce5..e583ed36274 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/delete/RestDeleteAction.java @@ -19,7 +19,6 @@ package org.elasticsearch.rest.action.delete; -import org.elasticsearch.action.ActionWriteResponse; import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; @@ -27,14 +26,13 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.*; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestBuilderListener; +import org.elasticsearch.rest.action.support.RestStatusToXContentListener; import static org.elasticsearch.rest.RestRequest.Method.DELETE; -import static org.elasticsearch.rest.RestStatus.NOT_FOUND; /** * @@ -62,31 +60,6 @@ public class RestDeleteAction extends BaseRestHandler { deleteRequest.consistencyLevel(WriteConsistencyLevel.fromString(consistencyLevel)); } - client.delete(deleteRequest, new RestBuilderListener(channel) { - @Override - public RestResponse buildResponse(DeleteResponse result, XContentBuilder builder) throws Exception { - 
ActionWriteResponse.ShardInfo shardInfo = result.getShardInfo(); - builder.startObject().field(Fields.FOUND, result.isFound()) - .field(Fields._INDEX, result.getIndex()) - .field(Fields._TYPE, result.getType()) - .field(Fields._ID, result.getId()) - .field(Fields._VERSION, result.getVersion()) - .value(shardInfo) - .endObject(); - RestStatus status = shardInfo.status(); - if (!result.isFound()) { - status = NOT_FOUND; - } - return new BytesRestResponse(status, builder); - } - }); - } - - static final class Fields { - static final XContentBuilderString FOUND = new XContentBuilderString("found"); - static final XContentBuilderString _INDEX = new XContentBuilderString("_index"); - static final XContentBuilderString _TYPE = new XContentBuilderString("_type"); - static final XContentBuilderString _ID = new XContentBuilderString("_id"); - static final XContentBuilderString _VERSION = new XContentBuilderString("_version"); + client.delete(deleteRequest, new RestStatusToXContentListener<>(channel)); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java index c7fc29155cc..310ce0a1248 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/index/RestIndexAction.java @@ -19,7 +19,6 @@ package org.elasticsearch.rest.action.index; -import org.elasticsearch.action.ActionWriteResponse; import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; @@ -27,11 +26,11 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.VersionType; import 
org.elasticsearch.rest.*; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestBuilderListener; +import org.elasticsearch.rest.action.support.RestStatusToXContentListener; import java.io.IOException; @@ -99,33 +98,6 @@ public class RestIndexAction extends BaseRestHandler { if (consistencyLevel != null) { indexRequest.consistencyLevel(WriteConsistencyLevel.fromString(consistencyLevel)); } - client.index(indexRequest, new RestBuilderListener(channel) { - @Override - public RestResponse buildResponse(IndexResponse response, XContentBuilder builder) throws Exception { - builder.startObject(); - ActionWriteResponse.ShardInfo shardInfo = response.getShardInfo(); - builder.field(Fields._INDEX, response.getIndex()) - .field(Fields._TYPE, response.getType()) - .field(Fields._ID, response.getId()) - .field(Fields._VERSION, response.getVersion()); - shardInfo.toXContent(builder, request); - builder.field(Fields.CREATED, response.isCreated()); - builder.endObject(); - RestStatus status = shardInfo.status(); - if (response.isCreated()) { - status = CREATED; - } - return new BytesRestResponse(status, builder); - } - }); + client.index(indexRequest, new RestStatusToXContentListener<>(channel)); } - - static final class Fields { - static final XContentBuilderString _INDEX = new XContentBuilderString("_index"); - static final XContentBuilderString _TYPE = new XContentBuilderString("_type"); - static final XContentBuilderString _ID = new XContentBuilderString("_id"); - static final XContentBuilderString _VERSION = new XContentBuilderString("_version"); - static final XContentBuilderString CREATED = new XContentBuilderString("created"); - } - } diff --git a/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java b/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java index 76e96ab7e7f..f59c329fbc3 100644 --- 
a/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/update/RestUpdateAction.java @@ -19,7 +19,6 @@ package org.elasticsearch.rest.action.update; -import org.elasticsearch.action.ActionWriteResponse; import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.update.UpdateRequest; @@ -29,7 +28,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; @@ -40,6 +38,7 @@ import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestBuilderListener; +import org.elasticsearch.rest.action.support.RestStatusToXContentListener; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptParameterParser; import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; @@ -48,7 +47,6 @@ import java.util.HashMap; import java.util.Map; import static org.elasticsearch.rest.RestRequest.Method.POST; -import static org.elasticsearch.rest.RestStatus.CREATED; /** */ @@ -123,38 +121,6 @@ public class RestUpdateAction extends BaseRestHandler { } } - client.update(updateRequest, new RestBuilderListener(channel) { - @Override - public RestResponse buildResponse(UpdateResponse response, XContentBuilder builder) throws Exception { - builder.startObject(); - ActionWriteResponse.ShardInfo shardInfo = response.getShardInfo(); - builder.field(Fields._INDEX, response.getIndex()) - .field(Fields._TYPE, response.getType()) - 
.field(Fields._ID, response.getId()) - .field(Fields._VERSION, response.getVersion()); - - shardInfo.toXContent(builder, request); - if (response.getGetResult() != null) { - builder.startObject(Fields.GET); - response.getGetResult().toXContentEmbedded(builder, request); - builder.endObject(); - } - - builder.endObject(); - RestStatus status = shardInfo.status(); - if (response.isCreated()) { - status = CREATED; - } - return new BytesRestResponse(status, builder); - } - }); - } - - static final class Fields { - static final XContentBuilderString _INDEX = new XContentBuilderString("_index"); - static final XContentBuilderString _TYPE = new XContentBuilderString("_type"); - static final XContentBuilderString _ID = new XContentBuilderString("_id"); - static final XContentBuilderString _VERSION = new XContentBuilderString("_version"); - static final XContentBuilderString GET = new XContentBuilderString("get"); + client.update(updateRequest, new RestStatusToXContentListener<>(channel)); } } diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java index d31a024187c..4d17155f611 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.support.replication; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionWriteResponse; +import org.elasticsearch.action.ReplicationResponse; import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.action.admin.indices.flush.FlushRequest; @@ -101,7 +101,7 @@ public class BroadcastReplicationTests extends ESTestCase { randomBoolean() ? 
ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED, ShardRoutingState.UNASSIGNED)); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); Future response = (broadcastReplicationAction.execute(new BroadcastRequest().indices(index))); - for (Tuple> shardRequests : broadcastReplicationAction.capturedShardRequests) { + for (Tuple> shardRequests : broadcastReplicationAction.capturedShardRequests) { if (randomBoolean()) { shardRequests.v2().onFailure(new NoShardAvailableActionException(shardRequests.v1())); } else { @@ -120,10 +120,10 @@ public class BroadcastReplicationTests extends ESTestCase { ShardRoutingState.STARTED)); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); Future response = (broadcastReplicationAction.execute(new BroadcastRequest().indices(index))); - for (Tuple> shardRequests : broadcastReplicationAction.capturedShardRequests) { - ActionWriteResponse actionWriteResponse = new ActionWriteResponse(); - actionWriteResponse.setShardInfo(new ActionWriteResponse.ShardInfo(1, 1, new ActionWriteResponse.ShardInfo.Failure[0])); - shardRequests.v2().onResponse(actionWriteResponse); + for (Tuple> shardRequests : broadcastReplicationAction.capturedShardRequests) { + ReplicationResponse replicationResponse = new ReplicationResponse(); + replicationResponse.setShardInfo(new ReplicationResponse.ShardInfo(1, 1, new ReplicationResponse.ShardInfo.Failure[0])); + shardRequests.v2().onResponse(replicationResponse); } logger.info("total shards: {}, ", response.get().getTotalShards()); assertBroadcastResponse(1, 1, 0, response.get(), null); @@ -137,20 +137,20 @@ public class BroadcastReplicationTests extends ESTestCase { Future response = (broadcastReplicationAction.execute(new BroadcastRequest().indices(index))); int succeeded = 0; int failed = 0; - for (Tuple> shardRequests : broadcastReplicationAction.capturedShardRequests) { + for (Tuple> shardRequests : 
broadcastReplicationAction.capturedShardRequests) { if (randomBoolean()) { - ActionWriteResponse.ShardInfo.Failure[] failures = new ActionWriteResponse.ShardInfo.Failure[0]; + ReplicationResponse.ShardInfo.Failure[] failures = new ReplicationResponse.ShardInfo.Failure[0]; int shardsSucceeded = randomInt(1) + 1; succeeded += shardsSucceeded; - ActionWriteResponse actionWriteResponse = new ActionWriteResponse(); + ReplicationResponse replicationResponse = new ReplicationResponse(); if (shardsSucceeded == 1 && randomBoolean()) { //sometimes add failure (no failure means shard unavailable) - failures = new ActionWriteResponse.ShardInfo.Failure[1]; - failures[0] = new ActionWriteResponse.ShardInfo.Failure(index, shardRequests.v1().id(), null, new Exception("pretend shard failed"), RestStatus.GATEWAY_TIMEOUT, false); + failures = new ReplicationResponse.ShardInfo.Failure[1]; + failures[0] = new ReplicationResponse.ShardInfo.Failure(index, shardRequests.v1().id(), null, new Exception("pretend shard failed"), RestStatus.GATEWAY_TIMEOUT, false); failed++; } - actionWriteResponse.setShardInfo(new ActionWriteResponse.ShardInfo(2, shardsSucceeded, failures)); - shardRequests.v2().onResponse(actionWriteResponse); + replicationResponse.setShardInfo(new ReplicationResponse.ShardInfo(2, shardsSucceeded, failures)); + shardRequests.v2().onResponse(replicationResponse); } else { // sometimes fail failed += 2; @@ -179,16 +179,16 @@ public class BroadcastReplicationTests extends ESTestCase { assertThat(shards.get(0), equalTo(shardId)); } - private class TestBroadcastReplicationAction extends TransportBroadcastReplicationAction { - protected final Set>> capturedShardRequests = ConcurrentCollections.newConcurrentSet(); + private class TestBroadcastReplicationAction extends TransportBroadcastReplicationAction { + protected final Set>> capturedShardRequests = ConcurrentCollections.newConcurrentSet(); public TestBroadcastReplicationAction(Settings settings, ThreadPool threadPool, 
ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TransportReplicationAction replicatedBroadcastShardAction) { super("test-broadcast-replication-action", BroadcastRequest::new, settings, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, replicatedBroadcastShardAction); } @Override - protected ActionWriteResponse newShardResponse() { - return new ActionWriteResponse(); + protected ReplicationResponse newShardResponse() { + return new ReplicationResponse(); } @Override @@ -202,7 +202,7 @@ public class BroadcastReplicationTests extends ESTestCase { } @Override - protected void shardExecute(BroadcastRequest request, ShardId shardId, ActionListener shardActionListener) { + protected void shardExecute(BroadcastRequest request, ShardId shardId, ActionListener shardActionListener) { capturedShardRequests.add(new Tuple<>(shardId, shardActionListener)); } } diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index f1e270b40fc..5834b2662ad 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.support.replication; import org.apache.lucene.index.CorruptIndexException; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.ActionWriteResponse; +import org.elasticsearch.action.ReplicationResponse; import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.support.ActionFilter; @@ -521,7 +521,7 @@ public class TransportReplicationActionTests extends 
ESTestCase { } assertThat(listener.isDone(), equalTo(true)); Response response = listener.get(); - final ActionWriteResponse.ShardInfo shardInfo = response.getShardInfo(); + final ReplicationResponse.ShardInfo shardInfo = response.getShardInfo(); assertThat(shardInfo.getFailed(), equalTo(criticalFailures)); assertThat(shardInfo.getFailures(), arrayWithSize(criticalFailures)); assertThat(shardInfo.getSuccessful(), equalTo(successful)); @@ -703,7 +703,7 @@ public class TransportReplicationActionTests extends ESTestCase { } } - static class Response extends ActionWriteResponse { + static class Response extends ReplicationResponse { } class Action extends TransportReplicationAction { diff --git a/core/src/test/java/org/elasticsearch/document/ShardInfoIT.java b/core/src/test/java/org/elasticsearch/document/ShardInfoIT.java index d4907d82128..4f28cf19d7b 100644 --- a/core/src/test/java/org/elasticsearch/document/ShardInfoIT.java +++ b/core/src/test/java/org/elasticsearch/document/ShardInfoIT.java @@ -19,7 +19,7 @@ package org.elasticsearch.document; -import org.elasticsearch.action.ActionWriteResponse; +import org.elasticsearch.action.ReplicationResponse; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; import org.elasticsearch.action.bulk.BulkItemResponse; @@ -117,11 +117,11 @@ public class ShardInfoIT extends ESIntegTestCase { } } - private void assertShardInfo(ActionWriteResponse response) { + private void assertShardInfo(ReplicationResponse response) { assertShardInfo(response, numCopies, numNodes); } - private void assertShardInfo(ActionWriteResponse response, int expectedTotal, int expectedSuccessful) { + private void assertShardInfo(ReplicationResponse response, int expectedTotal, int expectedSuccessful) { assertThat(response.getShardInfo().getTotal(), greaterThanOrEqualTo(expectedTotal)); assertThat(response.getShardInfo().getSuccessful(), 
greaterThanOrEqualTo(expectedSuccessful)); } diff --git a/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryActionTests.java b/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryActionTests.java index 2b708341c7f..c44608c4e4b 100644 --- a/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryActionTests.java +++ b/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryActionTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.InternalSearchHit; @@ -225,7 +226,7 @@ public class TransportDeleteByQueryActionTests extends ESSingleNodeTestCase { } else { deleted++; } - items[i] = new BulkItemResponse(i, "delete", new DeleteResponse("test", "type", String.valueOf(i), 1, delete)); + items[i] = new BulkItemResponse(i, "delete", new DeleteResponse(new ShardId("test", 0), "type", String.valueOf(i), 1, delete)); } else { items[i] = new BulkItemResponse(i, "delete", new BulkItemResponse.Failure("test", "type", String.valueOf(i), new Throwable("item failed"))); failed++; @@ -281,7 +282,7 @@ public class TransportDeleteByQueryActionTests extends ESSingleNodeTestCase { deleted[0] = deleted[0] + 1; deleted[index] = deleted[index] + 1; } - items[i] = new BulkItemResponse(i, "delete", new DeleteResponse("test-" + index, "type", String.valueOf(i), 1, delete)); + items[i] = new BulkItemResponse(i, "delete", new DeleteResponse(new ShardId("test-" + index, 0), "type", String.valueOf(i), 1, delete)); } else { items[i] = new BulkItemResponse(i, 
"delete", new BulkItemResponse.Failure("test-" + index, "type", String.valueOf(i), new Throwable("item failed"))); failed[0] = failed[0] + 1;