diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 1a4e5b58f33..00000000000
--- a/.travis.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-language: java
-jdk:
-  - openjdk7
-
-env:
-  - ES_TEST_LOCAL=true
-  - ES_TEST_LOCAL=false
-
-notifications:
-  email: false
diff --git a/TESTING.asciidoc b/TESTING.asciidoc
index 5eea0b8c163..ce81f97548f 100644
--- a/TESTING.asciidoc
+++ b/TESTING.asciidoc
@@ -18,24 +18,18 @@ gradle assemble
 
 == Other test options
 
-To disable and enable network transport, set the `Des.node.mode`.
+To disable and enable network transport, set the `tests.es.node.mode` system property.
 
 Use network transport:
 
 ------------------------------------
--Des.node.mode=network
+-Dtests.es.node.mode=network
 ------------------------------------
 
 Use local transport (default since 1.3):
 
 -------------------------------------
--Des.node.mode=local
--------------------------------------
-
-Alternatively, you can set the `ES_TEST_LOCAL` environment variable:
-
--------------------------------------
-export ES_TEST_LOCAL=true && gradle test
+-Dtests.es.node.mode=local
 -------------------------------------
 
 === Running Elasticsearch from a checkout
 
@@ -201,7 +195,7 @@ gradle test -Dtests.timeoutSuite=5000! ...
 
 Change the logging level of ES (not gradle)
 
 --------------------------------
-gradle test -Des.logger.level=DEBUG
+gradle test -Dtests.es.logger.level=DEBUG
 --------------------------------
 
 Print all the logging output from the test runs to the commandline
diff --git a/build.gradle b/build.gradle
index 3d52d4ab279..bf4b5a3b415 100644
--- a/build.gradle
+++ b/build.gradle
@@ -81,7 +81,7 @@ subprojects {
     nexus {
       String buildSnapshot = System.getProperty('build.snapshot', 'true')
       if (buildSnapshot == 'false') {
-        Repository repo = new RepositoryBuilder().findGitDir(new File('.')).build()
+        Repository repo = new RepositoryBuilder().findGitDir(project.rootDir).build()
         String shortHash = repo.resolve('HEAD')?.name?.substring(0,7)
         repositoryUrl = project.hasProperty('build.repository') ? project.property('build.repository') : "file://${System.getenv('HOME')}/elasticsearch-releases/${version}-${shortHash}/"
       }
@@ -144,6 +144,14 @@ subprojects {
       // see https://discuss.gradle.org/t/add-custom-javadoc-option-that-does-not-take-an-argument/5959
      javadoc.options.encoding='UTF8'
      javadoc.options.addStringOption('Xdoclint:all,-missing', '-quiet')
+      /*
+        TODO: building javadocs with java 9 b118 is currently broken with weird errors, so
+        for now this is commented out...try again with the next ea build...
+      javadoc.executable = new File(project.javaHome, 'bin/javadoc')
+      if (project.javaVersion == JavaVersion.VERSION_1_9) {
+        // TODO: remove this hack! gradle should be passing this...
+        javadoc.options.addStringOption('source', '8')
+      }*/
     }
   }
diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle
index b286124bfee..623fdab3e3e 100644
--- a/buildSrc/build.gradle
+++ b/buildSrc/build.gradle
@@ -84,7 +84,7 @@ dependencies {
   compile 'com.netflix.nebula:gradle-info-plugin:3.0.3'
   compile 'org.eclipse.jgit:org.eclipse.jgit:3.2.0.201312181205-r'
   compile 'com.perforce:p4java:2012.3.551082' // THIS IS SUPPOSED TO BE OPTIONAL IN THE FUTURE....
-  compile 'de.thetaphi:forbiddenapis:2.0'
+  compile 'de.thetaphi:forbiddenapis:2.1'
   compile 'com.bmuschko:gradle-nexus-plugin:2.3.1'
   compile 'org.apache.rat:apache-rat:0.11'
 }
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
index 029c80b6e25..168ceca1d23 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
@@ -143,7 +143,7 @@ class BuildPlugin implements Plugin {
       }
       project.rootProject.ext.javaHome = javaHome
-      project.rootProject.ext.javaVersion = javaVersion
+      project.rootProject.ext.javaVersion = javaVersionEnum
       project.rootProject.ext.buildChecksDone = true
     }
     project.targetCompatibility = minimumJava
@@ -378,7 +378,7 @@ class BuildPlugin implements Plugin {
          * -serial because we don't use java serialization. */
        // don't even think about passing args with -J-xxx, oracle will ask you to submit a bug report :)
-       options.compilerArgs << '-Werror' << '-Xlint:all,-path,-serial' << '-Xdoclint:all' << '-Xdoclint:-missing'
+       options.compilerArgs << '-Werror' << '-Xlint:all,-path,-serial,-options' << '-Xdoclint:all' << '-Xdoclint:-missing'
        // compile with compact 3 profile by default
        // NOTE: this is just a compile time check: does not replace testing with a compact3 JRE
        if (project.compactProfile != 'full') {
@@ -387,10 +387,13 @@ class BuildPlugin implements Plugin {
        options.encoding = 'UTF-8'
        //options.incremental = true
 
-       // gradle ignores target/source compatibility when it is "unnecessary", but since to compile with
-       // java 9, gradle is running in java 8, it incorrectly thinks it is unnecessary
-       assert minimumJava == JavaVersion.VERSION_1_8
-       options.compilerArgs << '-target' << '1.8' << '-source' << '1.8'
+       if (project.javaVersion == JavaVersion.VERSION_1_9) {
+         // hack until gradle supports java 9's new "-release" arg
+         assert minimumJava == JavaVersion.VERSION_1_8
+         options.compilerArgs << '-release' << '8'
+         project.sourceCompatibility = null
+         project.targetCompatibility = null
+       }
      }
    }
  }
@@ -456,7 +459,7 @@ class BuildPlugin implements Plugin {
      // default test sysprop values
      systemProperty 'tests.ifNoTests', 'fail'
      // TODO: remove setting logging level via system property
-     systemProperty 'es.logger.level', 'WARN'
+     systemProperty 'tests.logger.level', 'WARN'
      for (Map.Entry property : System.properties.entrySet()) {
        if (property.getKey().startsWith('tests.') ||
            property.getKey().startsWith('es.')) {
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy
index ef8c8e280ed..9f840df36e1 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy
@@ -87,6 +87,10 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
      * calls buildTest to actually build the test.
      */
     void handleSnippet(Snippet snippet) {
+        if (snippet.language == 'json') {
+            throw new InvalidUserDataException(
+                "$snippet: Use `js` instead of `json`.")
+        }
         if (snippet.testSetup) {
             setup(snippet)
             return
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy
index 427d3191dc5..48a4d7c26dc 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy
@@ -62,9 +62,8 @@ class PrecommitTasks {
     private static Task configureForbiddenApis(Project project) {
         project.pluginManager.apply(ForbiddenApisPlugin.class)
         project.forbiddenApis {
-            internalRuntimeForbidden = true
             failOnUnsupportedJava = false
-            bundledSignatures = ['jdk-unsafe', 'jdk-deprecated', 'jdk-system-out']
+            bundledSignatures = ['jdk-unsafe', 'jdk-deprecated', 'jdk-non-portable', 'jdk-system-out']
             signaturesURLs = [getClass().getResource('/forbidden/jdk-signatures.txt'),
                               getClass().getResource('/forbidden/es-all-signatures.txt')]
             suppressAnnotations = ['**.SuppressForbidden']
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy
index 3ff5a06ad42..076a564f84a 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy
@@ -203,8 +203,7 @@ public class ThirdPartyAuditTask extends AntTask {
         Set sheistySet = getSheistyClasses(tmpDir.toPath());
 
         try {
-            ant.thirdPartyAudit(internalRuntimeForbidden: false,
-                                failOnUnsupportedJava: false,
+            ant.thirdPartyAudit(failOnUnsupportedJava: false,
                                 failOnMissingClasses: false,
                                 signaturesFile: new File(getClass().getResource('/forbidden/third-party-audit.txt').toURI()),
                                 classpath: classpath.asPath) {
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy
index 2ff5e333139..5d9961a0425 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy
@@ -129,18 +129,18 @@ class NodeInfo {
         }
 
         env = [ 'JAVA_HOME' : project.javaHome ]
-        args.addAll("-E", "es.node.portsfile=true")
+        args.addAll("-E", "node.portsfile=true")
         String collectedSystemProperties = config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ")
         String esJavaOpts = config.jvmArgs.isEmpty() ? collectedSystemProperties : collectedSystemProperties + " " + config.jvmArgs
         env.put('ES_JAVA_OPTS', esJavaOpts)
         for (Map.Entry property : System.properties.entrySet()) {
-            if (property.getKey().startsWith('es.')) {
+            if (property.key.startsWith('tests.es.')) {
                 args.add("-E")
-                args.add("${property.getKey()}=${property.getValue()}")
+                args.add("${property.key.substring('tests.es.'.size())}=${property.value}")
             }
         }
         env.put('ES_JVM_OPTIONS', new File(confDir, 'jvm.options'))
-        args.addAll("-E", "es.path.conf=${confDir}")
+        args.addAll("-E", "path.conf=${confDir}")
         if (Os.isFamily(Os.FAMILY_WINDOWS)) {
             args.add('"') // end the entire command, quoted
         }
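In Java terms, the Groovy loop above implements this forwarding rule: only system properties under the tests.es. namespace reach the test node, with the prefix stripped so the node sees plain setting names. A minimal sketch under that assumption (class and method names are illustrative, not from the patch):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;
    import java.util.Properties;

    class TestNodeArgs {
        private static final String PREFIX = "tests.es.";

        /** Builds -E arguments, e.g. tests.es.node.mode=network becomes -E node.mode=network. */
        static List<String> forwardedArgs(Properties sysProps) {
            List<String> args = new ArrayList<>();
            for (Map.Entry<Object, Object> e : sysProps.entrySet()) {
                String key = e.getKey().toString();
                if (key.startsWith(PREFIX)) {
                    args.add("-E");
                    // strip the prefix; anything outside tests.es. is deliberately ignored
                    args.add(key.substring(PREFIX.length()) + "=" + e.getValue());
                }
            }
            return args;
        }
    }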
diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml
index 48f07b1a2d5..63133dd851b 100644
--- a/buildSrc/src/main/resources/checkstyle_suppressions.xml
+++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml
@@ -37,8 +37,6 @@
-
-
@@ -179,12 +177,6 @@
-
-
-
-
-
-
@@ -453,9 +445,6 @@
-
-
-
@@ -520,7 +509,6 @@
-
@@ -566,7 +554,6 @@
-
@@ -745,7 +732,6 @@
-
@@ -981,8 +967,6 @@
-
-
@@ -1071,9 +1055,6 @@
-
-
-
@@ -1222,6 +1203,16 @@
+
+
+
+
+
+
+
+
+
+
@@ -1232,13 +1223,6 @@
-
-
-
-
-
-
-
@@ -1309,6 +1293,7 @@
+
@@ -1335,7 +1320,6 @@
-
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index fee8404080a..6669abb90b3 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -1,4 +1,4 @@
-elasticsearch = 5.0.0
+elasticsearch = 5.0.0-alpha3
 lucene = 6.0.0
 
 # optional dependencies
@@ -13,9 +13,7 @@ jna = 4.1.0
 # test dependencies
 randomizedrunner = 2.3.2
 junit = 4.11
-# TODO: Upgrade httpclient to a version > 4.5.1 once released. Then remove o.e.test.rest.client.StrictHostnameVerifier* and use
-# DefaultHostnameVerifier instead since we no longer need to workaround https://issues.apache.org/jira/browse/HTTPCLIENT-1698
-httpclient = 4.3.6
-httpcore = 4.3.3
+httpclient = 4.5.2
+httpcore = 4.4.4
 commonslogging = 1.1.3
 commonscodec = 1.10
diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorException.java b/core/src/main/java/org/apache/log4j/Java9Hack.java
similarity index 59%
rename from core/src/main/java/org/elasticsearch/index/percolator/PercolatorException.java
rename to core/src/main/java/org/apache/log4j/Java9Hack.java
index 3813679d81c..831cf5b35ae 100644
--- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorException.java
+++ b/core/src/main/java/org/apache/log4j/Java9Hack.java
@@ -16,25 +16,22 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.index.percolator;
 
-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.index.Index;
+package org.apache.log4j;
 
-import java.io.IOException;
+import org.apache.log4j.helpers.ThreadLocalMap;
 
 /**
- * Exception during indexing a percolator query.
+ * Log4j 1.2 MDC breaks because it parses java.version incorrectly (does not handle new java9 versioning).
+ *
+ * This hack fixes up the pkg private members as if it had detected the java version correctly.
  */
-public class PercolatorException extends ElasticsearchException {
+public class Java9Hack {
 
-    public PercolatorException(Index index, String msg, Throwable cause) {
-        super(msg, cause);
-        setIndex(index);
-    }
-
-    public PercolatorException(StreamInput in) throws IOException{
-        super(in);
+    public static void fixLog4j() {
+        if (MDC.mdc.tlm == null) {
+            MDC.mdc.java1 = false;
+            MDC.mdc.tlm = new ThreadLocalMap();
+        }
     }
 }
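If it helps to see a call site: a fixup like this has to run once, before anything touches MDC. A hypothetical bootstrap snippet, assuming nothing beyond what the patch shows (the class below is invented; only Java9Hack.fixLog4j() comes from the patch):

    public final class LoggingBootstrap {
        static {
            // Run the fixup before any MDC usage. It is harmless on JDK 8 as well,
            // since fixLog4j() only repairs the fields when MDC mis-detected the
            // version and left its ThreadLocalMap null.
            org.apache.log4j.Java9Hack.fixLog4j();
        }
        private LoggingBootstrap() {}
    }

Placing the class in org.apache.log4j is what lets it reach MDC's package-private fields.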
diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java b/core/src/main/java/org/elasticsearch/ElasticsearchException.java
index b242811b7be..1faf6cd4237 100644
--- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java
+++ b/core/src/main/java/org/elasticsearch/ElasticsearchException.java
@@ -21,7 +21,6 @@ package org.elasticsearch;
 
 import org.elasticsearch.action.support.replication.ReplicationOperation;
 import org.elasticsearch.cluster.action.shard.ShardStateAction;
-import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
@@ -681,8 +680,6 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
                 org.elasticsearch.index.shard.IndexShardRecoveryException::new, 106),
         REPOSITORY_MISSING_EXCEPTION(org.elasticsearch.repositories.RepositoryMissingException.class,
                 org.elasticsearch.repositories.RepositoryMissingException::new, 107),
-        PERCOLATOR_EXCEPTION(org.elasticsearch.index.percolator.PercolatorException.class,
-                org.elasticsearch.index.percolator.PercolatorException::new, 108),
         DOCUMENT_SOURCE_MISSING_EXCEPTION(org.elasticsearch.index.engine.DocumentSourceMissingException.class,
                 org.elasticsearch.index.engine.DocumentSourceMissingException::new, 109),
         FLUSH_NOT_ALLOWED_ENGINE_EXCEPTION(org.elasticsearch.index.engine.FlushNotAllowedEngineException.class,
diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java
index 9cc526d8f97..b2b4e9c296d 100644
--- a/core/src/main/java/org/elasticsearch/Version.java
+++ b/core/src/main/java/org/elasticsearch/Version.java
@@ -75,9 +75,9 @@ public class Version {
     public static final Version V_5_0_0_alpha1 = new Version(V_5_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_6_0_0);
     public static final int V_5_0_0_alpha2_ID = 5000002;
     public static final Version V_5_0_0_alpha2 = new Version(V_5_0_0_alpha2_ID, org.apache.lucene.util.Version.LUCENE_6_0_0);
-    public static final int V_5_0_0_ID = 5000099;
-    public static final Version V_5_0_0 = new Version(V_5_0_0_ID, org.apache.lucene.util.Version.LUCENE_6_0_0);
-    public static final Version CURRENT = V_5_0_0;
+    public static final int V_5_0_0_alpha3_ID = 5000003;
+    public static final Version V_5_0_0_alpha3 = new Version(V_5_0_0_alpha3_ID, org.apache.lucene.util.Version.LUCENE_6_0_0);
+    public static final Version CURRENT = V_5_0_0_alpha3;
 
     static {
         assert CURRENT.luceneVersion.equals(org.apache.lucene.util.Version.LATEST) : "Version must be upgraded to ["
@@ -90,8 +90,8 @@ public class Version {
 
     public static Version fromId(int id) {
         switch (id) {
-            case V_5_0_0_ID:
-                return V_5_0_0;
+            case V_5_0_0_alpha3_ID:
+                return V_5_0_0_alpha3;
             case V_5_0_0_alpha2_ID:
                 return V_5_0_0_alpha2;
             case V_5_0_0_alpha1_ID:
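For context on why 5000099 became 5000003: the ids pack major/minor/revision into decimal fields with a trailing two-digit counter, and prereleases use low counter values so they compare below the eventual GA id (which ends in 99, as the removed V_5_0_0_ID shows). A small sketch of that arithmetic; the helper is illustrative, not the actual Version code:

    final class VersionIds {
        /** e.g. id(5, 0, 0, 3) == 5000003 (alpha3) and id(5, 0, 0, 99) == 5000099 (the eventual 5.0.0 GA). */
        static int id(int major, int minor, int revision, int build) {
            return major * 1_000_000 + minor * 10_000 + revision * 100 + build;
        }

        public static void main(String[] args) {
            // alpha3 must sort before the final 5.0.0 release id
            assert id(5, 0, 0, 3) < id(5, 0, 0, 99);
            System.out.println(id(5, 0, 0, 3)); // prints 5000003
        }
    }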
diff --git a/core/src/main/java/org/elasticsearch/action/ActionModule.java b/core/src/main/java/org/elasticsearch/action/ActionModule.java
index 3e93f699645..bab3dcb2ed2 100644
--- a/core/src/main/java/org/elasticsearch/action/ActionModule.java
+++ b/core/src/main/java/org/elasticsearch/action/ActionModule.java
@@ -165,10 +165,6 @@ import org.elasticsearch.action.ingest.SimulatePipelineAction;
 import org.elasticsearch.action.ingest.SimulatePipelineTransportAction;
 import org.elasticsearch.action.main.MainAction;
 import org.elasticsearch.action.main.TransportMainAction;
-import org.elasticsearch.action.percolate.MultiPercolateAction;
-import org.elasticsearch.action.percolate.PercolateAction;
-import org.elasticsearch.action.percolate.TransportMultiPercolateAction;
-import org.elasticsearch.action.percolate.TransportPercolateAction;
 import org.elasticsearch.action.search.ClearScrollAction;
 import org.elasticsearch.action.search.MultiSearchAction;
 import org.elasticsearch.action.search.SearchAction;
@@ -332,8 +328,6 @@ public class ActionModule extends AbstractModule {
         registerAction(SearchAction.INSTANCE, TransportSearchAction.class);
         registerAction(SearchScrollAction.INSTANCE, TransportSearchScrollAction.class);
         registerAction(MultiSearchAction.INSTANCE, TransportMultiSearchAction.class);
-        registerAction(PercolateAction.INSTANCE, TransportPercolateAction.class);
-        registerAction(MultiPercolateAction.INSTANCE, TransportMultiPercolateAction.class);
         registerAction(ExplainAction.INSTANCE, TransportExplainAction.class);
         registerAction(ClearScrollAction.INSTANCE, TransportClearScrollAction.class);
         registerAction(RecoveryAction.INSTANCE, TransportRecoveryAction.class);
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanation.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanation.java
index dbabe681c7a..9ca0efd6c56 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanation.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanation.java
@@ -42,15 +42,18 @@ public final class ClusterAllocationExplanation implements ToXContent, Writeable
 
     private final ShardId shard;
     private final boolean primary;
+    private final boolean hasPendingAsyncFetch;
     private final String assignedNodeId;
     private final UnassignedInfo unassignedInfo;
     private final long remainingDelayMillis;
     private final Map nodeExplanations;
 
     public ClusterAllocationExplanation(ShardId shard, boolean primary, @Nullable String assignedNodeId, long remainingDelayMillis,
-                                        @Nullable UnassignedInfo unassignedInfo, Map nodeExplanations) {
+                                        @Nullable UnassignedInfo unassignedInfo, boolean hasPendingAsyncFetch,
+                                        Map nodeExplanations) {
         this.shard = shard;
         this.primary = primary;
+        this.hasPendingAsyncFetch = hasPendingAsyncFetch;
         this.assignedNodeId = assignedNodeId;
         this.unassignedInfo = unassignedInfo;
         this.remainingDelayMillis = remainingDelayMillis;
@@ -60,6 +63,7 @@ public final class ClusterAllocationExplanation implements ToXContent, Writeable
     public ClusterAllocationExplanation(StreamInput in) throws IOException {
         this.shard = ShardId.readShardId(in);
         this.primary = in.readBoolean();
+        this.hasPendingAsyncFetch = in.readBoolean();
         this.assignedNodeId = in.readOptionalString();
         this.unassignedInfo = in.readOptionalWriteable(UnassignedInfo::new);
         this.remainingDelayMillis = in.readVLong();
@@ -77,6 +81,7 @@ public final class ClusterAllocationExplanation implements ToXContent, Writeable
     public void writeTo(StreamOutput out) throws IOException {
         this.getShard().writeTo(out);
         out.writeBoolean(this.isPrimary());
+        out.writeBoolean(this.isStillFetchingShardData());
         out.writeOptionalString(this.getAssignedNodeId());
         out.writeOptionalWriteable(this.getUnassignedInfo());
         out.writeVLong(remainingDelayMillis);
@@ -97,6 +102,11 @@ public final class ClusterAllocationExplanation implements ToXContent, Writeable
         return this.primary;
     }
 
+    /** Returns true if shard data is still being fetched for the allocation */
+    public boolean isStillFetchingShardData() {
+        return this.hasPendingAsyncFetch;
+    }
+
     /** Returns true if the shard is assigned to a node */
     public boolean isAssigned() {
         return this.assignedNodeId != null;
@@ -138,6 +148,7 @@ public final class ClusterAllocationExplanation implements ToXContent, Writeable
         if (assignedNodeId != null) {
             builder.field("assigned_node_id", this.assignedNodeId);
         }
+        builder.field("shard_state_fetch_pending", this.hasPendingAsyncFetch);
         // If we have unassigned info, show that
         if (unassignedInfo != null) {
             unassignedInfo.toXContent(builder, params);
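One thing worth noting about the serialization change above: Elasticsearch's stream protocol has no field tags, so the new boolean only round-trips because the StreamInput constructor and writeTo touch it at the same position, directly after primary. A minimal sketch of that invariant using plain java.io streams (names are illustrative):

    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    final class ExplanationWire {
        // Field order is the wire protocol: the new boolean must sit in the same
        // position (right after "primary") on both the write and the read side.
        static void write(DataOutputStream out, boolean primary, boolean pendingFetch) throws IOException {
            out.writeBoolean(primary);
            out.writeBoolean(pendingFetch); // newly added field
        }

        static boolean[] read(DataInputStream in) throws IOException {
            boolean primary = in.readBoolean();
            boolean pendingFetch = in.readBoolean(); // must mirror write() exactly
            return new boolean[] { primary, pendingFetch };
        }
    }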
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportClusterAllocationExplainAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportClusterAllocationExplainAction.java
index 28b62083d42..46a4d1795ba 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportClusterAllocationExplainAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportClusterAllocationExplainAction.java
@@ -50,6 +50,7 @@ import org.elasticsearch.common.collect.ImmutableOpenIntMap;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.gateway.GatewayAllocator;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
@@ -69,19 +70,22 @@ public class TransportClusterAllocationExplainAction
     private final AllocationDeciders allocationDeciders;
     private final ShardsAllocator shardAllocator;
     private final TransportIndicesShardStoresAction shardStoresAction;
+    private final GatewayAllocator gatewayAllocator;
 
     @Inject
     public TransportClusterAllocationExplainAction(Settings settings, TransportService transportService,
                                                    ClusterService clusterService, ThreadPool threadPool,
                                                    ActionFilters actionFilters,
                                                    IndexNameExpressionResolver indexNameExpressionResolver,
                                                    ClusterInfoService clusterInfoService, AllocationDeciders allocationDeciders,
-                                                   ShardsAllocator shardAllocator, TransportIndicesShardStoresAction shardStoresAction) {
+                                                   ShardsAllocator shardAllocator, TransportIndicesShardStoresAction shardStoresAction,
+                                                   GatewayAllocator gatewayAllocator) {
         super(settings, ClusterAllocationExplainAction.NAME, transportService, clusterService, threadPool, actionFilters,
                 indexNameExpressionResolver, ClusterAllocationExplainRequest::new);
         this.clusterInfoService = clusterInfoService;
         this.allocationDeciders = allocationDeciders;
         this.shardAllocator = shardAllocator;
         this.shardStoresAction = shardStoresAction;
+        this.gatewayAllocator = gatewayAllocator;
     }
 
     @Override
@@ -130,7 +134,8 @@
                                                              Float nodeWeight,
                                                              IndicesShardStoresResponse.StoreStatus storeStatus,
                                                              String assignedNodeId,
-                                                             Set activeAllocationIds) {
+                                                             Set activeAllocationIds,
+                                                             boolean hasPendingAsyncFetch) {
         final ClusterAllocationExplanation.FinalDecision finalDecision;
         final ClusterAllocationExplanation.StoreCopy storeCopy;
         final String finalExplanation;
@@ -161,6 +166,19 @@ public class TransportClusterAllocationExplainAction
         if (node.getId().equals(assignedNodeId)) {
             finalDecision = ClusterAllocationExplanation.FinalDecision.ALREADY_ASSIGNED;
             finalExplanation = "the shard is already assigned to this node";
+        } else if (hasPendingAsyncFetch &&
+                shard.primary() == false &&
+                shard.unassigned() &&
+                shard.allocatedPostIndexCreate(indexMetaData) &&
+                nodeDecision.type() != Decision.Type.YES) {
+            finalExplanation = "the shard cannot be assigned because allocation deciders return a " + nodeDecision.type().name() +
+                    " decision and the shard's state is still being fetched";
+            finalDecision = ClusterAllocationExplanation.FinalDecision.NO;
+        } else if (hasPendingAsyncFetch &&
+                shard.unassigned() &&
+                shard.allocatedPostIndexCreate(indexMetaData)) {
+            finalExplanation = "the shard's state is still being fetched so it cannot be allocated";
+            finalDecision = ClusterAllocationExplanation.FinalDecision.NO;
         } else if (shard.primary() && shard.unassigned() && shard.allocatedPostIndexCreate(indexMetaData) &&
                 storeCopy == ClusterAllocationExplanation.StoreCopy.STALE) {
             finalExplanation = "the copy of the shard is stale, allocation ids do not match";
@@ -180,6 +198,7 @@ public class TransportClusterAllocationExplainAction
             finalDecision = ClusterAllocationExplanation.FinalDecision.NO;
             finalExplanation = "the shard cannot be assigned because one or more allocation decider returns a 'NO' decision";
         } else {
+            // TODO: handle throttling decision better here
             finalDecision = ClusterAllocationExplanation.FinalDecision.YES;
             if (storeCopy == ClusterAllocationExplanation.StoreCopy.AVAILABLE) {
                 finalExplanation = "the shard can be assigned and the node contains a valid copy of the shard data";
@@ -198,7 +217,8 @@ public class TransportClusterAllocationExplainAction
      */
     public static ClusterAllocationExplanation explainShard(ShardRouting shard, RoutingAllocation allocation, RoutingNodes routingNodes,
                                                             boolean includeYesDecisions, ShardsAllocator shardAllocator,
-                                                            List shardStores) {
+                                                            List shardStores,
+                                                            GatewayAllocator gatewayAllocator) {
         // don't short circuit deciders, we want a full explanation
         allocation.debugDecision(true);
         // get the existing unassigned info if available
@@ -238,11 +258,12 @@ public class TransportClusterAllocationExplainAction
             Float weight = weights.get(node);
             IndicesShardStoresResponse.StoreStatus storeStatus = nodeToStatus.get(node);
             NodeExplanation nodeExplanation = calculateNodeExplanation(shard, indexMetaData, node, decision, weight,
-                    storeStatus, shard.currentNodeId(), indexMetaData.activeAllocationIds(shard.getId()));
+                    storeStatus, shard.currentNodeId(), indexMetaData.activeAllocationIds(shard.getId()),
+                    allocation.hasPendingAsyncFetch());
             explanations.put(node, nodeExplanation);
         }
-        return new ClusterAllocationExplanation(shard.shardId(), shard.primary(),
-                shard.currentNodeId(), remainingDelayMillis, ui, explanations);
+        return new ClusterAllocationExplanation(shard.shardId(), shard.primary(), shard.currentNodeId(),
+                remainingDelayMillis, ui, gatewayAllocator.hasFetchPending(shard.shardId(), shard.primary()), explanations);
     }
 
     @Override
@@ -250,7 +271,7 @@ public class TransportClusterAllocationExplainAction
                           final ActionListener listener) {
         final RoutingNodes routingNodes = state.getRoutingNodes();
         final RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, routingNodes, state,
                 clusterInfoService.getClusterInfo(), System.nanoTime());
+                clusterInfoService.getClusterInfo(), System.nanoTime(), false);
 
         ShardRouting foundShard = null;
         if (request.useAnyUnassignedShard()) {
@@ -297,7 +318,7 @@ public class TransportClusterAllocationExplainAction
                         shardStoreResponse.getStoreStatuses().get(shardRouting.getIndexName());
                 List shardStoreStatus = shardStatuses.get(shardRouting.id());
                 ClusterAllocationExplanation cae = explainShard(shardRouting, allocation, routingNodes,
-                        request.includeYesDecisions(), shardAllocator, shardStoreStatus);
+                        request.includeYesDecisions(), shardAllocator, shardStoreStatus, gatewayAllocator);
                 listener.onResponse(new ClusterAllocationExplainResponse(cae));
             }
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequest.java
index a241f01ea28..4ec729bbc53 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequest.java
@@ -19,28 +19,24 @@
 
 package org.elasticsearch.action.admin.cluster.reroute;
 
-import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.support.master.AcknowledgedRequest;
 import org.elasticsearch.cluster.routing.allocation.command.AllocationCommand;
-import org.elasticsearch.cluster.routing.allocation.command.AllocationCommandRegistry;
 import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands;
-import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.xcontent.XContentHelper;
-import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
+import java.util.Objects;
 
 /**
  * Request to submit cluster reroute allocation commands
 */
 public class ClusterRerouteRequest extends AcknowledgedRequest {
-    AllocationCommands commands = new AllocationCommands();
-    boolean dryRun;
-    boolean explain;
+    private AllocationCommands commands = new AllocationCommands();
+    private boolean dryRun;
+    private boolean explain;
+    private boolean retryFailed;
 
     public ClusterRerouteRequest() {
     }
@@ -81,6 +77,15 @@ public class ClusterRerouteRequest extends AcknowledgedRequest
+    /**
+     * Sets the retry failed flag (defaults to false). If true, the
+     * request will retry allocating shards that can't currently be allocated due to too many allocation failures.
+     */
+    public ClusterRerouteRequest setRetryFailed(boolean retryFailed) {
+        this.retryFailed = retryFailed;
+        return this;
+    }
+
     /**
      * Returns the current explain flag
     */
@@ -88,41 +93,27 @@ public class ClusterRerouteRequest extends AcknowledgedRequest
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestBuilder.java
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestBuilder.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestBuilder.java
-public class ClusterRerouteRequestBuilder extends AcknowledgedRequestBuilder {
+public class ClusterRerouteRequestBuilder
+        extends AcknowledgedRequestBuilder {
 
     public ClusterRerouteRequestBuilder(ElasticsearchClient client, ClusterRerouteAction action) {
         super(client, action, new ClusterRerouteRequest());
     }
@@ -61,10 +60,11 @@ public class ClusterRerouteRequestBuilder extends AcknowledgedRequestBuilder
     /**
+     * Sets the retry failed flag (defaults to false). If true, the
+     * request will retry allocating shards that can't currently be allocated due to too many allocation failures.
      */
-    public ClusterRerouteRequestBuilder setCommands(AllocationCommand... commands) throws Exception {
-        request.commands(commands);
+    public ClusterRerouteRequestBuilder setRetryFailed(boolean retryFailed) {
+        request.setRetryFailed(retryFailed);
         return this;
     }
-}
+}
\ No newline at end of file
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java
index e6116dbfbc4..b0b676f6e2e 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/reroute/TransportClusterRerouteAction.java
@@ -33,6 +33,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingExplanations;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
@@ -68,38 +69,55 @@ public class TransportClusterRerouteAction extends TransportMasterNodeAction
     @Override
     protected void masterOperation(final ClusterRerouteRequest request, final ClusterState state,
                                    final ActionListener listener) {
-        clusterService.submitStateUpdateTask("cluster_reroute (api)", new AckedClusterStateUpdateTask(Priority.IMMEDIATE, request, listener) {
-
-            private volatile ClusterState clusterStateToSend;
-            private volatile RoutingExplanations explanations;
-
-            @Override
-            protected ClusterRerouteResponse newResponse(boolean acknowledged) {
-                return new ClusterRerouteResponse(acknowledged, clusterStateToSend, explanations);
-            }
-
-            @Override
-            public void onAckTimeout() {
-                listener.onResponse(new ClusterRerouteResponse(false, clusterStateToSend, new RoutingExplanations()));
-            }
-
-            @Override
-            public void onFailure(String source, Throwable t) {
-                logger.debug("failed to perform [{}]", t, source);
-                super.onFailure(source, t);
-            }
-
-            @Override
-            public ClusterState execute(ClusterState currentState) {
-                RoutingAllocation.Result routingResult = allocationService.reroute(currentState, request.commands, request.explain());
-                ClusterState newState = ClusterState.builder(currentState).routingResult(routingResult).build();
-                clusterStateToSend = newState;
-                explanations = routingResult.explanations();
-                if (request.dryRun) {
-                    return currentState;
-                }
-                return newState;
-            }
-        });
+        clusterService.submitStateUpdateTask("cluster_reroute (api)", new ClusterRerouteResponseAckedClusterStateUpdateTask(logger,
+                allocationService, request, listener));
     }
-}
\ No newline at end of file
+
+    static class ClusterRerouteResponseAckedClusterStateUpdateTask extends AckedClusterStateUpdateTask {
+
+        private final ClusterRerouteRequest request;
+        private final ActionListener listener;
+        private final ESLogger logger;
+        private final AllocationService allocationService;
+        private volatile ClusterState clusterStateToSend;
+        private volatile RoutingExplanations explanations;
+
+        ClusterRerouteResponseAckedClusterStateUpdateTask(ESLogger logger, AllocationService allocationService, ClusterRerouteRequest request,
+                                                          ActionListener listener) {
+            super(Priority.IMMEDIATE, request, listener);
+            this.request = request;
+            this.listener = listener;
+            this.logger = logger;
+            this.allocationService = allocationService;
+        }
+
+        @Override
+        protected ClusterRerouteResponse newResponse(boolean acknowledged) {
+            return new ClusterRerouteResponse(acknowledged, clusterStateToSend, explanations);
+        }
+
+        @Override
+        public void onAckTimeout() {
+            listener.onResponse(new ClusterRerouteResponse(false, clusterStateToSend, new RoutingExplanations()));
+        }
+
+        @Override
+        public void onFailure(String source, Throwable t) {
+            logger.debug("failed to perform [{}]", t, source);
+            super.onFailure(source, t);
+        }
+
+        @Override
+        public ClusterState execute(ClusterState currentState) {
+            RoutingAllocation.Result routingResult = allocationService.reroute(currentState, request.getCommands(), request.explain(),
+                    request.isRetryFailed());
+            ClusterState newState = ClusterState.builder(currentState).routingResult(routingResult).build();
+            clusterStateToSend = newState;
+            explanations = routingResult.explanations();
+            if (request.dryRun()) {
+                return currentState;
+            }
+            return newState;
+        }
+    }
+}
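The practical effect of the new flag: a caller can ask the master to retry allocating shards that failed too many times, without submitting any explicit commands. A sketch of such a caller, assuming only a connected Client instance (the wrapper class is illustrative; setRetryFailed comes from the builder change above):

    import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse;
    import org.elasticsearch.client.Client;

    class RerouteRetryExample {
        /** Retries shards that exhausted their allocation attempts; no commands needed. */
        static ClusterRerouteResponse retryFailedShards(Client client) {
            return client.admin().cluster()
                    .prepareReroute()
                    .setRetryFailed(true)
                    .get();
        }
    }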
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java
index 0a7a8a9ce80..efc2fbeb5b5 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java
@@ -81,18 +81,13 @@ public class CreateSnapshotResponse extends ActionResponse implements ToXContent
         return snapshotInfo.status();
     }
 
-    static final class Fields {
-        static final String SNAPSHOT = "snapshot";
-        static final String ACCEPTED = "accepted";
-    }
-
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         if (snapshotInfo != null) {
-            builder.field(Fields.SNAPSHOT);
-            snapshotInfo.toExternalXContent(builder, params);
+            builder.field("snapshot");
+            snapshotInfo.toXContent(builder, params);
         } else {
-            builder.field(Fields.ACCEPTED, true);
+            builder.field("accepted", true);
         }
         return builder;
     }
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java
index a5db19684b2..ec996e6d366 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsResponse.java
@@ -74,15 +74,11 @@ public class GetSnapshotsResponse extends ActionResponse implements ToXContent {
         }
     }
 
-    static final class Fields {
-        static final String SNAPSHOTS = "snapshots";
-    }
-
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
-        builder.startArray(Fields.SNAPSHOTS);
+        builder.startArray("snapshots");
         for (SnapshotInfo snapshotInfo : snapshots) {
-            snapshotInfo.toExternalXContent(builder, params);
+            snapshotInfo.toXContent(builder, params);
         }
         builder.endArray();
         return builder;
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotResponse.java
index a54c01ed15a..70f4f2aa4f2 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotResponse.java
@@ -73,18 +73,13 @@ public class RestoreSnapshotResponse extends ActionResponse implements ToXConten
         return restoreInfo.status();
     }
 
-    static final class Fields {
-        static final String SNAPSHOT = "snapshot";
-        static final String ACCEPTED = "accepted";
-    }
-
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
         if (restoreInfo != null) {
-            builder.field(Fields.SNAPSHOT);
+            builder.field("snapshot");
             restoreInfo.toXContent(builder, params);
         } else {
-            builder.field(Fields.ACCEPTED, true);
+            builder.field("accepted", true);
         }
         return builder;
     }
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java
index 34e503224ce..b9800a2d9ed 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java
@@ -73,13 +73,9 @@ public class SnapshotsStatusResponse extends ActionResponse implements ToXConten
         }
     }
 
-    static final class Fields {
-        static final String SNAPSHOTS = "snapshots";
-    }
-
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startArray(Fields.SNAPSHOTS);
+        builder.startArray("snapshots");
         for (SnapshotStatus snapshot : snapshots) {
             snapshot.toXContent(builder, params);
         }
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java
index 8c0c427beea..9a7bb5c8f3d 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java
@@ -27,7 +27,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.cache.query.QueryCacheStats;
 import org.elasticsearch.index.engine.SegmentsStats;
 import org.elasticsearch.index.fielddata.FieldDataStats;
-import org.elasticsearch.index.percolator.PercolatorQueryCacheStats;
 import org.elasticsearch.index.shard.DocsStats;
 import org.elasticsearch.index.store.StoreStats;
 import org.elasticsearch.search.suggest.completion.CompletionStats;
@@ -45,7 +44,6 @@ public class ClusterStatsIndices implements ToXContent {
     private QueryCacheStats queryCache;
     private CompletionStats completion;
     private SegmentsStats segments;
-    private PercolatorQueryCacheStats percolatorCache;
 
     public ClusterStatsIndices(List nodeResponses) {
         ObjectObjectHashMap countsPerIndex = new ObjectObjectHashMap<>();
@@ -56,7 +54,6 @@ public class ClusterStatsIndices implements ToXContent {
         this.queryCache = new QueryCacheStats();
         this.completion = new CompletionStats();
         this.segments = new SegmentsStats();
-        this.percolatorCache = new PercolatorQueryCacheStats();
 
         for (ClusterStatsNodeResponse r : nodeResponses) {
             for (org.elasticsearch.action.admin.indices.stats.ShardStats shardStats : r.shardsStats()) {
@@ -79,7 +76,6 @@ public class ClusterStatsIndices implements ToXContent {
                 queryCache.add(shardCommonStats.queryCache);
                 completion.add(shardCommonStats.completion);
                 segments.add(shardCommonStats.segments);
-                percolatorCache.add(shardCommonStats.percolatorCache);
             }
         }
 
@@ -122,10 +118,6 @@ public class ClusterStatsIndices implements ToXContent {
         return segments;
     }
 
-    public PercolatorQueryCacheStats getPercolatorCache() {
-        return percolatorCache;
-    }
-
     static final class Fields {
         static final String COUNT = "count";
     }
@@ -140,7 +132,6 @@ public class ClusterStatsIndices implements ToXContent {
         queryCache.toXContent(builder, params);
         completion.toXContent(builder, params);
         segments.toXContent(builder, params);
-        percolatorCache.toXContent(builder, params);
         return builder;
     }
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java
index bae7b20694d..3a0b1455209 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java
@@ -55,8 +55,7 @@ public class TransportClusterStatsAction extends TransportNodesAction {
 
     private static final CommonStatsFlags SHARD_STATS_FLAGS = new CommonStatsFlags(CommonStatsFlags.Flag.Docs, CommonStatsFlags.Flag.Store,
-            CommonStatsFlags.Flag.FieldData, CommonStatsFlags.Flag.QueryCache, CommonStatsFlags.Flag.Completion, CommonStatsFlags.Flag.Segments,
-            CommonStatsFlags.Flag.PercolatorCache);
+            CommonStatsFlags.Flag.FieldData, CommonStatsFlags.Flag.QueryCache, CommonStatsFlags.Flag.Completion, CommonStatsFlags.Flag.Segments);
 
     private final NodeService nodeService;
     private final IndicesService indicesService;
@@ -100,7 +99,7 @@ public class TransportClusterStatsAction extends TransportNodesAction {
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequest.java
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequest.java
         return flags.isSet(Flag.FieldData);
     }
 
-    public IndicesStatsRequest percolate(boolean percolate) {
-        flags.set(Flag.PercolatorCache, percolate);
-        return this;
-    }
-
-    public boolean percolate() {
-        return flags.isSet(Flag.PercolatorCache);
-    }
-
     public IndicesStatsRequest segments(boolean segments) {
         flags.set(Flag.Segments, segments);
         return this;
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequestBuilder.java
index cad919cbd18..8e7afe3e7e3 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequestBuilder.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequestBuilder.java
@@ -127,11 +127,6 @@ public class IndicesStatsRequestBuilder extends BroadcastOperationRequestBuilder
         return this;
     }
 
-    public IndicesStatsRequestBuilder setPercolate(boolean percolate) {
-        request.percolate(percolate);
-        return this;
-    }
-
     public IndicesStatsRequestBuilder setSegments(boolean segments) {
         request.segments(segments);
         return this;
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java
index 8c12dfa9fda..7e8ccd30a8a 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java
@@ -139,9 +139,6 @@ public class TransportIndicesStatsAction extends TransportBroadcastByNodeAction<
             flags.set(CommonStatsFlags.Flag.FieldData);
             flags.fieldDataFields(request.fieldDataFields());
         }
-        if (request.percolate()) {
-            flags.set(CommonStatsFlags.Flag.PercolatorCache);
-        }
         if (request.segments()) {
             flags.set(CommonStatsFlags.Flag.Segments);
             flags.includeSegmentFileSizes(request.includeSegmentFileSizes());
@@ -163,6 +160,6 @@ public class TransportIndicesStatsAction extends TransportBroadcastByNodeAction<
             flags.set(CommonStatsFlags.Flag.Recovery);
         }
 
-        return new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexService.cache().getPercolatorQueryCache(), indexShard, flags), indexShard.commitStats());
+        return new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexShard, flags), indexShard.commitStats());
     }
 }
diff --git a/core/src/main/java/org/elasticsearch/action/search/ParsedScrollId.java b/core/src/main/java/org/elasticsearch/action/search/ParsedScrollId.java
index ee0ba95b2b1..2ddb35e1357 100644
--- a/core/src/main/java/org/elasticsearch/action/search/ParsedScrollId.java
+++ b/core/src/main/java/org/elasticsearch/action/search/ParsedScrollId.java
@@ -19,8 +19,6 @@
 
 package org.elasticsearch.action.search;
 
-import java.util.Map;
-
 /**
  *
 */
@@ -36,13 +34,10 @@ class ParsedScrollId {
 
     private final ScrollIdForNode[] context;
 
-    private final Map attributes;
-
-    public ParsedScrollId(String source, String type, ScrollIdForNode[] context, Map attributes) {
+    public ParsedScrollId(String source, String type, ScrollIdForNode[] context) {
         this.source = source;
         this.type = type;
         this.context = context;
-        this.attributes = attributes;
     }
 
     public String getSource() {
@@ -56,8 +51,4 @@ class ParsedScrollId {
     public ScrollIdForNode[] getContext() {
         return context;
     }
-
-    public Map getAttributes() {
-        return this.attributes;
-    }
 }
diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryAndFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryAndFetchAsyncAction.java
index f7cb72b22e9..7cf608559ce 100644
--- a/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryAndFetchAsyncAction.java
+++ b/core/src/main/java/org/elasticsearch/action/search/SearchDfsQueryAndFetchAsyncAction.java
@@ -123,7 +123,7 @@ class SearchDfsQueryAndFetchAsyncAction extends AbstractSearchAsyncAction
diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java b/core/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java
--- a/core/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java
+++ b/core/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java
-    static String buildScrollId(SearchType searchType, AtomicArray searchPhaseResults,
-                                @Nullable Map attributes) throws IOException {
+    static String buildScrollId(SearchType searchType, AtomicArray searchPhaseResults) throws IOException {
         if (searchType == SearchType.DFS_QUERY_THEN_FETCH || searchType == SearchType.QUERY_THEN_FETCH) {
-            return buildScrollId(ParsedScrollId.QUERY_THEN_FETCH_TYPE, searchPhaseResults, attributes);
+            return buildScrollId(ParsedScrollId.QUERY_THEN_FETCH_TYPE, searchPhaseResults);
         } else if (searchType == SearchType.QUERY_AND_FETCH || searchType == SearchType.DFS_QUERY_AND_FETCH) {
-            return buildScrollId(ParsedScrollId.QUERY_AND_FETCH_TYPE, searchPhaseResults, attributes);
+            return buildScrollId(ParsedScrollId.QUERY_AND_FETCH_TYPE, searchPhaseResults);
         } else {
             throw new IllegalStateException("search_type [" + searchType + "] not supported");
         }
     }
 
-    static String buildScrollId(String type, AtomicArray searchPhaseResults,
-                                @Nullable Map attributes) throws IOException {
-        StringBuilder sb = new StringBuilder().append(type).append(';');
-        sb.append(searchPhaseResults.asList().size()).append(';');
-        for (AtomicArray.Entry entry : searchPhaseResults.asList()) {
-            SearchPhaseResult searchPhaseResult = entry.value;
-            sb.append(searchPhaseResult.id()).append(':').append(searchPhaseResult.shardTarget().nodeId()).append(';');
-        }
-        if (attributes == null) {
-            sb.append("0;");
-        } else {
-            sb.append(attributes.size()).append(";");
-            for (Map.Entry entry : attributes.entrySet()) {
-                sb.append(entry.getKey()).append(':').append(entry.getValue()).append(';');
+    static String buildScrollId(String type, AtomicArray searchPhaseResults) throws IOException {
+        try (RAMOutputStream out = new RAMOutputStream()) {
+            out.writeString(type);
+            out.writeVInt(searchPhaseResults.asList().size());
+            for (AtomicArray.Entry entry : searchPhaseResults.asList()) {
+                SearchPhaseResult searchPhaseResult = entry.value;
+                out.writeLong(searchPhaseResult.id());
+                out.writeString(searchPhaseResult.shardTarget().nodeId());
             }
+            byte[] bytes = new byte[(int) out.getFilePointer()];
+            out.writeTo(bytes, 0);
+            return Base64.getUrlEncoder().encodeToString(bytes);
         }
-        BytesRef bytesRef = new BytesRef(sb);
-        return Base64.encodeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length, Base64.URL_SAFE);
     }
 
     static ParsedScrollId parseScrollId(String scrollId) {
-        CharsRefBuilder spare = new CharsRefBuilder();
         try {
-            byte[] decode = Base64.decode(scrollId, Base64.URL_SAFE);
-            spare.copyUTF8Bytes(decode, 0, decode.length);
+            byte[] bytes = Base64.getUrlDecoder().decode(scrollId);
+            ByteArrayDataInput in = new ByteArrayDataInput(bytes);
+            String type = in.readString();
+            ScrollIdForNode[] context = new ScrollIdForNode[in.readVInt()];
+            for (int i = 0; i < context.length; ++i) {
+                long id = in.readLong();
+                String target = in.readString();
+                context[i] = new ScrollIdForNode(target, id);
+            }
+            if (in.getPosition() != bytes.length) {
+                throw new IllegalArgumentException("Not all bytes were read");
+            }
+            return new ParsedScrollId(scrollId, type, context);
         } catch (Exception e) {
-            throw new IllegalArgumentException("Failed to decode scrollId", e);
+            throw new IllegalArgumentException("Cannot parse scroll id", e);
         }
-        String[] elements = spare.get().toString().split(";");
-        if (elements.length < 2) {
-            throw new IllegalArgumentException("Malformed scrollId [" + scrollId + "]");
-        }
-
-        int index = 0;
-        String type = elements[index++];
-        int contextSize = Integer.parseInt(elements[index++]);
-        if (elements.length < contextSize + 2) {
-            throw new IllegalArgumentException("Malformed scrollId [" + scrollId + "]");
-        }
-
-        ScrollIdForNode[] context = new ScrollIdForNode[contextSize];
-        for (int i = 0; i < contextSize; i++) {
-            String element = elements[index++];
-            int sep = element.indexOf(':');
-            if (sep == -1) {
-                throw new IllegalArgumentException("Malformed scrollId [" + scrollId + "]");
-            }
-            context[i] = new ScrollIdForNode(element.substring(sep + 1), Long.parseLong(element.substring(0, sep)));
-        }
-        Map attributes;
-        int attributesSize = Integer.parseInt(elements[index++]);
-        if (attributesSize == 0) {
-            attributes = emptyMap();
-        } else {
-            attributes = new HashMap<>(attributesSize);
-            for (int i = 0; i < attributesSize; i++) {
-                String element = elements[index++];
-                int sep = element.indexOf(':');
-                attributes.put(element.substring(0, sep), element.substring(sep + 1));
-            }
-        }
-        return new ParsedScrollId(scrollId, type, context, attributes);
     }
 
     private TransportSearchHelper() {
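To make the new scroll-id format concrete: it is a URL-safe Base64 blob of (type, count, then id/node pairs) written with Lucene's RAMOutputStream and read back with ByteArrayDataInput, replacing the old semicolon-separated string. A self-contained round-trip sketch under those assumptions (class and method names are made up):

    import org.apache.lucene.store.ByteArrayDataInput;
    import org.apache.lucene.store.RAMOutputStream;

    import java.io.IOException;
    import java.util.Base64;

    class ScrollIdCodec {
        /** Mirrors buildScrollId above: type string, vInt count, then (long id, string node) pairs. */
        static String encode(String type, long[] ids, String[] nodes) throws IOException {
            try (RAMOutputStream out = new RAMOutputStream()) {
                out.writeString(type);
                out.writeVInt(ids.length);
                for (int i = 0; i < ids.length; i++) {
                    out.writeLong(ids[i]);
                    out.writeString(nodes[i]);
                }
                byte[] bytes = new byte[(int) out.getFilePointer()];
                out.writeTo(bytes, 0);
                return Base64.getUrlEncoder().encodeToString(bytes);
            }
        }

        /** Mirrors parseScrollId above, minus the trailing-bytes check. */
        static void decode(String scrollId) {
            byte[] bytes = Base64.getUrlDecoder().decode(scrollId);
            ByteArrayDataInput in = new ByteArrayDataInput(bytes);
            String type = in.readString();
            int count = in.readVInt();
            for (int i = 0; i < count; i++) {
                long id = in.readLong();
                String node = in.readString();
                System.out.println(type + ": context " + id + " on node " + node);
            }
        }
    }

One upside of the binary form over the old string format: node ids and types containing ';' or ':' can no longer corrupt the encoding.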
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java
index 6d35cafd088..305a4fd30ae 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java
@@ -177,15 +177,7 @@ final class Bootstrap {
         // install SM after natives, shutdown hooks, etc.
         Security.configure(environment, BootstrapSettings.SECURITY_FILTER_BAD_DEFAULTS_SETTING.get(settings));
 
-        // We do not need to reload system properties here as we have already applied them in building the settings and
-        // reloading could cause multiple prompts to the user for values if a system property was specified with a prompt
-        // placeholder
-        Settings nodeSettings = Settings.builder()
-            .put(settings)
-            .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true)
-            .build();
-
-        node = new Node(nodeSettings) {
+        node = new Node(settings) {
             @Override
             protected void validateNodeBeforeAcceptingRequests(Settings settings, BoundTransportAddress boundTransportAddress) {
                 BootstrapCheck.check(settings, boundTransportAddress);
@@ -193,13 +185,13 @@ final class Bootstrap {
         };
     }
 
-    private static Environment initialSettings(boolean foreground, String pidFile) {
+    private static Environment initialSettings(boolean foreground, String pidFile, Map esSettings) {
         Terminal terminal = foreground ? Terminal.DEFAULT : null;
         Settings.Builder builder = Settings.builder();
         if (Strings.hasLength(pidFile)) {
             builder.put(Environment.PIDFILE_SETTING.getKey(), pidFile);
         }
-        return InternalSettingsPreparer.prepareEnvironment(builder.build(), terminal);
+        return InternalSettingsPreparer.prepareEnvironment(builder.build(), terminal, esSettings);
     }
 
     private void start() {
@@ -233,11 +225,13 @@ final class Bootstrap {
         // Set the system property before anything has a chance to trigger its use
         initLoggerPrefix();
 
-        elasticsearchSettings(esSettings);
+        // force the class initializer for BootstrapInfo to run before
+        // the security manager is installed
+        BootstrapInfo.init();
 
         INSTANCE = new Bootstrap();
 
-        Environment environment = initialSettings(foreground, pidFile);
+        Environment environment = initialSettings(foreground, pidFile, esSettings);
         Settings settings = environment.settings();
         LogConfigurator.configure(settings, true);
         checkForCustomConfFile();
@@ -295,13 +289,6 @@ final class Bootstrap {
         }
     }
 
-    @SuppressForbidden(reason = "Sets system properties passed as CLI parameters")
-    private static void elasticsearchSettings(Map esSettings) {
-        for (Map.Entry esSetting : esSettings.entrySet()) {
-            System.setProperty(esSetting.getKey(), esSetting.getValue());
-        }
-    }
-
     @SuppressForbidden(reason = "System#out")
     private static void closeSystOut() {
         System.out.close();
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapInfo.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapInfo.java
index bd693951eb2..791836bf8a4 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapInfo.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapInfo.java
@@ -120,4 +120,8 @@ public final class BootstrapInfo {
         }
         return SYSTEM_PROPERTIES;
     }
+
+    public static void init() {
+    }
+
 }
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
index bb1f6cc87d5..b3259129473 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java
@@ -21,28 +21,25 @@ package org.elasticsearch.bootstrap;
 
 import joptsimple.OptionSet;
 import joptsimple.OptionSpec;
-import joptsimple.util.KeyValuePair;
 import org.elasticsearch.Build;
-import org.elasticsearch.cli.Command;
 import org.elasticsearch.cli.ExitCodes;
+import org.elasticsearch.cli.SettingCommand;
 import org.elasticsearch.cli.Terminal;
 import org.elasticsearch.cli.UserError;
 import org.elasticsearch.monitor.jvm.JvmInfo;
 
 import java.io.IOException;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.Map;
 
 /**
  * This class starts elasticsearch.
 */
-class Elasticsearch extends Command {
+class Elasticsearch extends SettingCommand {
 
     private final OptionSpec versionOption;
     private final OptionSpec daemonizeOption;
     private final OptionSpec pidfileOption;
-    private final OptionSpec propertyOption;
 
     // visible for testing
     Elasticsearch() {
@@ -56,7 +53,6 @@ class Elasticsearch extends Command {
         pidfileOption = parser.acceptsAll(Arrays.asList("p", "pidfile"),
             "Creates a pid file in the specified path on start")
             .withRequiredArg();
-        propertyOption = parser.accepts("E", "Configure an Elasticsearch setting").withRequiredArg().ofType(KeyValuePair.class);
     }
 
     /**
@@ -75,7 +71,7 @@ class Elasticsearch extends Command {
     }
 
     @Override
-    protected void execute(Terminal terminal, OptionSet options) throws Exception {
+    protected void execute(Terminal terminal, OptionSet options, Map settings) throws Exception {
         if (options.nonOptionArguments().isEmpty() == false) {
             throw new UserError(ExitCodes.USAGE, "Positional arguments not allowed, found " + options.nonOptionArguments());
         }
@@ -84,26 +80,15 @@ class Elasticsearch extends Command {
             throw new UserError(ExitCodes.USAGE, "Elasticsearch version option is mutually exclusive with any other option");
         }
         terminal.println("Version: " + org.elasticsearch.Version.CURRENT
-            + ", Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date()
-            + ", JVM: " + JvmInfo.jvmInfo().version());
+                + ", Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date()
+                + ", JVM: " + JvmInfo.jvmInfo().version());
         return;
     }
 
     final boolean daemonize = options.has(daemonizeOption);
     final String pidFile = pidfileOption.value(options);
 
-    final Map esSettings = new HashMap<>();
-    for (final KeyValuePair kvp : propertyOption.values(options)) {
-        if (!kvp.key.startsWith("es.")) {
-            throw new UserError(ExitCodes.USAGE, "Elasticsearch settings must be prefixed with [es.] but was [" + kvp.key + "]");
-        }
-        if (kvp.value.isEmpty()) {
-            throw new UserError(ExitCodes.USAGE, "Elasticsearch setting [" + kvp.key + "] must not be empty");
-        }
-        esSettings.put(kvp.key, kvp.value);
-    }
-
-    init(daemonize, pidFile, esSettings);
+    init(daemonize, pidFile, settings);
 }
 
 void init(final boolean daemonize, final String pidFile, final Map esSettings) {
diff --git a/core/src/main/java/org/elasticsearch/cli/Command.java b/core/src/main/java/org/elasticsearch/cli/Command.java
index 1fc7c9fe74f..3e2faf13657 100644
--- a/core/src/main/java/org/elasticsearch/cli/Command.java
+++ b/core/src/main/java/org/elasticsearch/cli/Command.java
@@ -19,15 +19,15 @@
 
 package org.elasticsearch.cli;
 
-import java.io.IOException;
-import java.util.Arrays;
-
 import joptsimple.OptionException;
 import joptsimple.OptionParser;
 import joptsimple.OptionSet;
 import joptsimple.OptionSpec;
 import org.elasticsearch.common.SuppressForbidden;
 
+import java.io.IOException;
+import java.util.Arrays;
+
 /**
  * An action to execute within a cli.
 */
@@ -112,4 +112,5 @@ public abstract class Command {
      *
      * Any runtime user errors (like an input file that does not exist), should throw a {@link UserError}. */
     protected abstract void execute(Terminal terminal, OptionSet options) throws Exception;
+
 }
*/ protected abstract void execute(Terminal terminal, OptionSet options) throws Exception; + } diff --git a/core/src/main/java/org/elasticsearch/cli/SettingCommand.java b/core/src/main/java/org/elasticsearch/cli/SettingCommand.java new file mode 100644 index 00000000000..868975ac6ff --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cli/SettingCommand.java @@ -0,0 +1,77 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cli; + +import joptsimple.OptionSet; +import joptsimple.OptionSpec; +import joptsimple.util.KeyValuePair; + +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; + +public abstract class SettingCommand extends Command { + + private final OptionSpec settingOption; + + public SettingCommand(String description) { + super(description); + this.settingOption = parser.accepts("E", "Configure a setting").withRequiredArg().ofType(KeyValuePair.class); + } + + @Override + protected void execute(Terminal terminal, OptionSet options) throws Exception { + final Map settings = new HashMap<>(); + for (final KeyValuePair kvp : settingOption.values(options)) { + if (kvp.value.isEmpty()) { + throw new UserError(ExitCodes.USAGE, "Setting [" + kvp.key + "] must not be empty"); + } + settings.put(kvp.key, kvp.value); + } + + putSystemPropertyIfSettingIsMissing(settings, "path.conf", "es.path.conf"); + putSystemPropertyIfSettingIsMissing(settings, "path.data", "es.path.data"); + putSystemPropertyIfSettingIsMissing(settings, "path.home", "es.path.home"); + putSystemPropertyIfSettingIsMissing(settings, "path.logs", "es.path.logs"); + + execute(terminal, options, settings); + } + + protected static void putSystemPropertyIfSettingIsMissing(final Map settings, final String setting, final String key) { + final String value = System.getProperty(key); + if (value != null) { + if (settings.containsKey(setting)) { + final String message = + String.format( + Locale.ROOT, + "duplicate setting [%s] found via command-line [%s] and system property [%s]", + setting, + settings.get(setting), + value); + throw new IllegalArgumentException(message); + } else { + settings.put(setting, value); + } + } + } + + protected abstract void execute(Terminal terminal, OptionSet options, Map settings) throws Exception; + +} diff --git a/core/src/main/java/org/elasticsearch/client/Client.java b/core/src/main/java/org/elasticsearch/client/Client.java index 47e8e43f37b..0cf22d7a2c4 100644 --- a/core/src/main/java/org/elasticsearch/client/Client.java +++ b/core/src/main/java/org/elasticsearch/client/Client.java @@ -42,12 +42,6 @@ import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; import 
org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.percolate.MultiPercolateRequest; -import org.elasticsearch.action.percolate.MultiPercolateRequestBuilder; -import org.elasticsearch.action.percolate.MultiPercolateResponse; -import org.elasticsearch.action.percolate.PercolateRequest; -import org.elasticsearch.action.percolate.PercolateRequestBuilder; -import org.elasticsearch.action.percolate.PercolateResponse; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.ClearScrollRequestBuilder; import org.elasticsearch.action.search.ClearScrollResponse; @@ -419,36 +413,6 @@ public interface Client extends ElasticsearchClient, Releasable { */ MultiTermVectorsRequestBuilder prepareMultiTermVectors(); - /** - * Percolates a request returning the matches documents. - */ - ActionFuture percolate(PercolateRequest request); - - /** - * Percolates a request returning the matches documents. - */ - void percolate(PercolateRequest request, ActionListener listener); - - /** - * Percolates a request returning the matches documents. - */ - PercolateRequestBuilder preparePercolate(); - - /** - * Performs multiple percolate requests. - */ - ActionFuture multiPercolate(MultiPercolateRequest request); - - /** - * Performs multiple percolate requests. - */ - void multiPercolate(MultiPercolateRequest request, ActionListener listener); - - /** - * Performs multiple percolate requests. - */ - MultiPercolateRequestBuilder prepareMultiPercolate(); - /** * Computes a score explanation for the specified request. * diff --git a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java index 6083422862c..e36bc4b8d77 100644 --- a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -295,14 +295,6 @@ import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.ingest.SimulatePipelineRequestBuilder; import org.elasticsearch.action.ingest.SimulatePipelineResponse; import org.elasticsearch.action.ingest.WritePipelineResponse; -import org.elasticsearch.action.percolate.MultiPercolateAction; -import org.elasticsearch.action.percolate.MultiPercolateRequest; -import org.elasticsearch.action.percolate.MultiPercolateRequestBuilder; -import org.elasticsearch.action.percolate.MultiPercolateResponse; -import org.elasticsearch.action.percolate.PercolateAction; -import org.elasticsearch.action.percolate.PercolateRequest; -import org.elasticsearch.action.percolate.PercolateRequestBuilder; -import org.elasticsearch.action.percolate.PercolateResponse; import org.elasticsearch.action.search.ClearScrollAction; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.ClearScrollRequestBuilder; @@ -623,36 +615,6 @@ public abstract class AbstractClient extends AbstractComponent implements Client return new MultiTermVectorsRequestBuilder(this, MultiTermVectorsAction.INSTANCE); } - @Override - public ActionFuture percolate(final PercolateRequest request) { - return execute(PercolateAction.INSTANCE, request); - } - - @Override - public void percolate(final PercolateRequest request, final ActionListener listener) { - execute(PercolateAction.INSTANCE, request, listener); - } - - @Override - public PercolateRequestBuilder preparePercolate() { - return new PercolateRequestBuilder(this, PercolateAction.INSTANCE); - } - - 
@Override - public MultiPercolateRequestBuilder prepareMultiPercolate() { - return new MultiPercolateRequestBuilder(this, MultiPercolateAction.INSTANCE); - } - - @Override - public void multiPercolate(MultiPercolateRequest request, ActionListener listener) { - execute(MultiPercolateAction.INSTANCE, request, listener); - } - - @Override - public ActionFuture multiPercolate(MultiPercolateRequest request) { - return execute(MultiPercolateAction.INSTANCE, request); - } - @Override public ExplainRequestBuilder prepareExplain(String index, String type, String id) { return new ExplainRequestBuilder(this, ExplainAction.INSTANCE, index, type, id); diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java index 47dd2ce9ae6..a02e399ac0c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -49,6 +49,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDeci import org.elasticsearch.cluster.routing.allocation.decider.NodeVersionAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.RebalanceOnlyWhenActiveAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ReplicaAfterPrimaryActiveAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAllocationDecider; @@ -79,6 +80,7 @@ public class ClusterModule extends AbstractModule { new Setting<>("cluster.routing.allocation.type", BALANCED_ALLOCATOR, Function.identity(), Property.NodeScope); public static final List> DEFAULT_ALLOCATION_DECIDERS = Collections.unmodifiableList(Arrays.asList( + MaxRetryAllocationDecider.class, SameShardAllocationDecider.class, FilterAllocationDecider.class, ReplicaAfterPrimaryActiveAllocationDecider.class, diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index d3b5e7ecbad..a41d02d28fa 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -42,7 +42,6 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.NodeServicesProvider; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.percolator.PercolatorFieldMapper; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.InvalidTypeNameException; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java index 53b094bc34b..a1e891bce3d 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java @@ -22,6 +22,7 @@ package org.elasticsearch.cluster.routing; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Randomness; 
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -331,15 +332,13 @@ public class IndexShardRoutingTable implements Iterable<ShardRouting> {
     public ShardIterator onlyNodeActiveInitializingShardsIt(String nodeId) {
         ArrayList<ShardRouting> ordered = new ArrayList<>(activeShards.size() + allInitializingShards.size());
-        // fill it in a randomized fashion
-        for (int i = 0; i < activeShards.size(); i++) {
-            ShardRouting shardRouting = activeShards.get(i);
+        int seed = shuffler.nextSeed();
+        for (ShardRouting shardRouting : shuffler.shuffle(activeShards, seed)) {
             if (nodeId.equals(shardRouting.currentNodeId())) {
                 ordered.add(shardRouting);
             }
         }
-        for (int i = 0; i < allInitializingShards.size(); i++) {
-            ShardRouting shardRouting = allInitializingShards.get(i);
+        for (ShardRouting shardRouting : shuffler.shuffle(allInitializingShards, seed)) {
             if (nodeId.equals(shardRouting.currentNodeId())) {
                 ordered.add(shardRouting);
             }
@@ -347,26 +346,31 @@ public class IndexShardRoutingTable implements Iterable<ShardRouting> {
         return new PlainShardIterator(shardId, ordered);
     }
 
+    public ShardIterator onlyNodeSelectorActiveInitializingShardsIt(String nodeAttributes, DiscoveryNodes discoveryNodes) {
+        return onlyNodeSelectorActiveInitializingShardsIt(new String[] {nodeAttributes}, discoveryNodes);
+    }
+
     /**
      * Returns shards based on nodeAttributes given such as node name, node attribute, node IP
      * Supports node specifications in cluster API
      */
-    public ShardIterator onlyNodeSelectorActiveInitializingShardsIt(String nodeAttribute, DiscoveryNodes discoveryNodes) {
+    public ShardIterator onlyNodeSelectorActiveInitializingShardsIt(String[] nodeAttributes, DiscoveryNodes discoveryNodes) {
         ArrayList<ShardRouting> ordered = new ArrayList<>(activeShards.size() + allInitializingShards.size());
-        Set<String> selectedNodes = Sets.newHashSet(discoveryNodes.resolveNodesIds(nodeAttribute));
-
-        for (ShardRouting shardRouting : activeShards) {
+        Set<String> selectedNodes = Sets.newHashSet(discoveryNodes.resolveNodesIds(nodeAttributes));
+        int seed = shuffler.nextSeed();
+        for (ShardRouting shardRouting : shuffler.shuffle(activeShards, seed)) {
             if (selectedNodes.contains(shardRouting.currentNodeId())) {
                 ordered.add(shardRouting);
             }
         }
-        for (ShardRouting shardRouting : allInitializingShards) {
+        for (ShardRouting shardRouting : shuffler.shuffle(allInitializingShards, seed)) {
             if (selectedNodes.contains(shardRouting.currentNodeId())) {
                 ordered.add(shardRouting);
             }
         }
         if (ordered.isEmpty()) {
-            throw new IllegalArgumentException("No data node with criteria [" + nodeAttribute + "] found");
+            throw new IllegalArgumentException("no data nodes with criteria [" +
+                Strings.arrayToCommaDelimitedString(nodeAttributes) + "] found for shard: " + shardId());
         }
         return new PlainShardIterator(shardId, ordered);
     }
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java
index 70246026894..3fb6f55a919 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java
@@ -177,8 +177,8 @@ public class OperationRouting extends AbstractComponent {
                 ensureNodeIdExists(nodes, nodeId);
                 return indexShard.onlyNodeActiveInitializingShardsIt(nodeId);
             case ONLY_NODES:
-                String nodeAttribute = preference.substring(Preference.ONLY_NODES.type().length() + 1);
-                return indexShard.onlyNodeSelectorActiveInitializingShardsIt(nodeAttribute, nodes);
+                String nodeAttributes = preference.substring(Preference.ONLY_NODES.type().length() + 1);
+                return indexShard.onlyNodeSelectorActiveInitializingShardsIt(nodeAttributes.split(","), nodes);
             default:
                 throw new IllegalArgumentException("unknown preference [" + preferenceType + "]");
         }
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java b/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java
index 2670363364d..bc44cd1701c 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/UnassignedInfo.java
@@ -48,7 +48,6 @@ public final class UnassignedInfo implements ToXContent, Writeable {
     public static final Setting<TimeValue> INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING =
         Setting.timeSetting("index.unassigned.node_left.delayed_timeout", DEFAULT_DELAYED_NODE_LEFT_TIMEOUT, Property.Dynamic,
             Property.IndexScope);
-
     /**
      * Reason why the shard is in unassigned state.
      * <p>
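With the OperationRouting change above, the ONLY_NODES preference accepts a comma-separated list of node specifications instead of a single one. A rough sketch of the new code path, with hypothetical node names and assuming "_only_nodes" is the prefix returned by Preference.ONLY_NODES.type():

    // Hypothetical preference string; the node names are made up for illustration.
    String preference = "_only_nodes:node-1,node-2";
    // OperationRouting strips the prefix plus the ':' separator...
    String nodeAttributes = preference.substring(Preference.ONLY_NODES.type().length() + 1);
    // ...and hands the individual specifications to the routing table, where each
    // entry may be a node name, node id, attribute or IP, resolved through
    // DiscoveryNodes.resolveNodesIds(...) before the matching shards are collected.
    ShardIterator it = indexShard.onlyNodeSelectorActiveInitializingShardsIt(nodeAttributes.split(","), nodes);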
@@ -103,7 +102,11 @@ public final class UnassignedInfo implements ToXContent, Writeable { /** * A better replica location is identified and causes the existing replica allocation to be cancelled. */ - REALLOCATED_REPLICA; + REALLOCATED_REPLICA, + /** + * Unassigned as a result of a failed primary while the replica was initializing. + */ + PRIMARY_FAILED; } private final Reason reason; @@ -112,6 +115,7 @@ public final class UnassignedInfo implements ToXContent, Writeable { private final long lastComputedLeftDelayNanos; // how long to delay shard allocation, not serialized (always positive, 0 means no delay) private final String message; private final Throwable failure; + private final int failedAllocations; /** * creates an UnassingedInfo object based **current** time @@ -120,7 +124,7 @@ public final class UnassignedInfo implements ToXContent, Writeable { * @param message more information about cause. **/ public UnassignedInfo(Reason reason, String message) { - this(reason, message, null, System.nanoTime(), System.currentTimeMillis()); + this(reason, message, null, reason == Reason.ALLOCATION_FAILED ? 1 : 0, System.nanoTime(), System.currentTimeMillis()); } /** @@ -130,13 +134,16 @@ public final class UnassignedInfo implements ToXContent, Writeable { * @param unassignedTimeNanos the time to use as the base for any delayed re-assignment calculation * @param unassignedTimeMillis the time of unassignment used to display to in our reporting. */ - public UnassignedInfo(Reason reason, @Nullable String message, @Nullable Throwable failure, long unassignedTimeNanos, long unassignedTimeMillis) { + public UnassignedInfo(Reason reason, @Nullable String message, @Nullable Throwable failure, int failedAllocations, long unassignedTimeNanos, long unassignedTimeMillis) { this.reason = reason; this.unassignedTimeMillis = unassignedTimeMillis; this.unassignedTimeNanos = unassignedTimeNanos; this.lastComputedLeftDelayNanos = 0L; this.message = message; this.failure = failure; + this.failedAllocations = failedAllocations; + assert (failedAllocations > 0) == (reason == Reason.ALLOCATION_FAILED): + "failedAllocations: " + failedAllocations + " for reason " + reason; assert !(message == null && failure != null) : "provide a message if a failure exception is provided"; } @@ -147,17 +154,19 @@ public final class UnassignedInfo implements ToXContent, Writeable { this.lastComputedLeftDelayNanos = newComputedLeftDelayNanos; this.message = unassignedInfo.message; this.failure = unassignedInfo.failure; + this.failedAllocations = unassignedInfo.failedAllocations; } public UnassignedInfo(StreamInput in) throws IOException { this.reason = Reason.values()[(int) in.readByte()]; this.unassignedTimeMillis = in.readLong(); // As System.nanoTime() cannot be compared across different JVMs, reset it to now. - // This means that in master failover situations, elapsed delay time is forgotten. + // This means that in master fail-over situations, elapsed delay time is forgotten. 
this.unassignedTimeNanos = System.nanoTime(); this.lastComputedLeftDelayNanos = 0L; this.message = in.readOptionalString(); this.failure = in.readThrowable(); + this.failedAllocations = in.readVInt(); } public void writeTo(StreamOutput out) throws IOException { @@ -166,12 +175,18 @@ public final class UnassignedInfo implements ToXContent, Writeable { // Do not serialize unassignedTimeNanos as System.nanoTime() cannot be compared across different JVMs out.writeOptionalString(message); out.writeThrowable(failure); + out.writeVInt(failedAllocations); } public UnassignedInfo readFrom(StreamInput in) throws IOException { return new UnassignedInfo(in); } + /** + * Returns the number of previously failed allocations of this shard. + */ + public int getNumFailedAllocations() { return failedAllocations; } + /** * The reason why the shard is unassigned. */ @@ -325,7 +340,11 @@ public final class UnassignedInfo implements ToXContent, Writeable { StringBuilder sb = new StringBuilder(); sb.append("[reason=").append(reason).append("]"); sb.append(", at[").append(DATE_TIME_FORMATTER.printer().print(unassignedTimeMillis)).append("]"); + if (failedAllocations > 0) { + sb.append(", failed_attempts[").append(failedAllocations).append("]"); + } String details = getDetails(); + if (details != null) { sb.append(", details[").append(details).append("]"); } @@ -342,6 +361,9 @@ public final class UnassignedInfo implements ToXContent, Writeable { builder.startObject("unassigned_info"); builder.field("reason", reason); builder.field("at", DATE_TIME_FORMATTER.printer().print(unassignedTimeMillis)); + if (failedAllocations > 0) { + builder.field("failed_attempts", failedAllocations); + } String details = getDetails(); if (details != null) { builder.field("details", details); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java index e1bbbb7f4ab..d59113675d8 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java @@ -222,8 +222,10 @@ public class AllocationService extends AbstractComponent { List orderedFailedShards = new ArrayList<>(failedShards); orderedFailedShards.sort(Comparator.comparing(failedShard -> failedShard.shard.primary())); for (FailedRerouteAllocation.FailedShard failedShard : orderedFailedShards) { + UnassignedInfo unassignedInfo = failedShard.shard.unassignedInfo(); + final int failedAllocations = unassignedInfo != null ? 
unassignedInfo.getNumFailedAllocations() : 0; changed |= applyFailedShard(allocation, failedShard.shard, true, new UnassignedInfo(UnassignedInfo.Reason.ALLOCATION_FAILED, failedShard.message, failedShard.failure, - System.nanoTime(), System.currentTimeMillis())); + failedAllocations + 1, System.nanoTime(), System.currentTimeMillis())); } if (!changed) { return new RoutingAllocation.Result(false, clusterState.routingTable(), clusterState.metaData()); @@ -257,16 +259,13 @@ public class AllocationService extends AbstractComponent { .collect(Collectors.joining(", ")); } - public RoutingAllocation.Result reroute(ClusterState clusterState, AllocationCommands commands) { - return reroute(clusterState, commands, false); - } - - public RoutingAllocation.Result reroute(ClusterState clusterState, AllocationCommands commands, boolean explain) { + public RoutingAllocation.Result reroute(ClusterState clusterState, AllocationCommands commands, boolean explain, boolean retryFailed) { RoutingNodes routingNodes = getMutableRoutingNodes(clusterState); // we don't shuffle the unassigned shards here, to try and get as close as possible to // a consistent result of the effect the commands have on the routing // this allows systems to dry run the commands, see the resulting cluster state, and act on it - RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, routingNodes, clusterState, clusterInfoService.getClusterInfo(), currentNanoTime()); + RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, routingNodes, clusterState, + clusterInfoService.getClusterInfo(), currentNanoTime(), retryFailed); // don't short circuit deciders, we want a full explanation allocation.debugDecision(true); // we ignore disable allocation, because commands are explicit @@ -305,7 +304,8 @@ public class AllocationService extends AbstractComponent { RoutingNodes routingNodes = getMutableRoutingNodes(clusterState); // shuffle the unassigned nodes, just so we won't have things like poison failed shards routingNodes.unassigned().shuffle(); - RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, routingNodes, clusterState, clusterInfoService.getClusterInfo(), currentNanoTime()); + RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, routingNodes, clusterState, + clusterInfoService.getClusterInfo(), currentNanoTime(), false); allocation.debugDecision(debug); if (!reroute(allocation)) { return new RoutingAllocation.Result(false, clusterState.routingTable(), clusterState.metaData()); @@ -437,7 +437,7 @@ public class AllocationService extends AbstractComponent { // now, go over all the shards routing on the node, and fail them for (ShardRouting shardRouting : node.copyShards()) { UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.NODE_LEFT, "node_left[" + node.nodeId() + "]", null, - allocation.getCurrentNanoTime(), System.currentTimeMillis()); + 0, allocation.getCurrentNanoTime(), System.currentTimeMillis()); applyFailedShard(allocation, shardRouting, false, unassignedInfo); } // its a dead node, remove it, note, its important to remove it *after* we apply failed shard @@ -457,8 +457,8 @@ public class AllocationService extends AbstractComponent { boolean changed = false; for (ShardRouting routing : replicas) { changed |= applyFailedShard(allocation, routing, false, - new UnassignedInfo(UnassignedInfo.Reason.ALLOCATION_FAILED, "primary failed while replica initializing", - null, allocation.getCurrentNanoTime(), System.currentTimeMillis())); + new 
UnassignedInfo(UnassignedInfo.Reason.PRIMARY_FAILED, "primary failed while replica initializing", + null, 0, allocation.getCurrentNanoTime(), System.currentTimeMillis())); } return changed; } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/FailedRerouteAllocation.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/FailedRerouteAllocation.java index a13862fed26..ef2e42eed76 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/FailedRerouteAllocation.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/FailedRerouteAllocation.java @@ -58,7 +58,7 @@ public class FailedRerouteAllocation extends RoutingAllocation { private final List failedShards; public FailedRerouteAllocation(AllocationDeciders deciders, RoutingNodes routingNodes, ClusterState clusterState, List failedShards, ClusterInfo clusterInfo) { - super(deciders, routingNodes, clusterState, clusterInfo, System.nanoTime()); + super(deciders, routingNodes, clusterState, clusterInfo, System.nanoTime(), false); this.failedShards = failedShards; } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java index 60ca3a8d5fd..0df8074e14c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java @@ -134,6 +134,8 @@ public class RoutingAllocation { private boolean ignoreDisable = false; + private final boolean retryFailed; + private boolean debugDecision = false; private boolean hasPendingAsyncFetch = false; @@ -148,7 +150,7 @@ public class RoutingAllocation { * @param clusterState cluster state before rerouting * @param currentNanoTime the nano time to use for all delay allocation calculation (typically {@link System#nanoTime()}) */ - public RoutingAllocation(AllocationDeciders deciders, RoutingNodes routingNodes, ClusterState clusterState, ClusterInfo clusterInfo, long currentNanoTime) { + public RoutingAllocation(AllocationDeciders deciders, RoutingNodes routingNodes, ClusterState clusterState, ClusterInfo clusterInfo, long currentNanoTime, boolean retryFailed) { this.deciders = deciders; this.routingNodes = routingNodes; this.metaData = clusterState.metaData(); @@ -156,6 +158,7 @@ public class RoutingAllocation { this.customs = clusterState.customs(); this.clusterInfo = clusterInfo; this.currentNanoTime = currentNanoTime; + this.retryFailed = retryFailed; } /** returns the nano time captured at the beginning of the allocation. 
used to make sure all time based decisions are aligned */ @@ -297,4 +300,8 @@ public class RoutingAllocation { public void setHasPendingAsyncFetch() { this.hasPendingAsyncFetch = true; } + + public boolean isRetryFailed() { + return retryFailed; + } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/StartedRerouteAllocation.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/StartedRerouteAllocation.java index e9570edd9c3..0f55ab4fda1 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/StartedRerouteAllocation.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/StartedRerouteAllocation.java @@ -36,7 +36,7 @@ public class StartedRerouteAllocation extends RoutingAllocation { private final List startedShards; public StartedRerouteAllocation(AllocationDeciders deciders, RoutingNodes routingNodes, ClusterState clusterState, List startedShards, ClusterInfo clusterInfo) { - super(deciders, routingNodes, clusterState, clusterInfo, System.nanoTime()); + super(deciders, routingNodes, clusterState, clusterInfo, System.nanoTime(), false); this.startedShards = startedShards; } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java index 20918159619..0fb27a80154 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.util.Objects; import java.util.function.Consumer; import java.util.function.Function; @@ -228,4 +229,22 @@ public abstract class AbstractAllocateAllocationCommand implements AllocationCom protected void extraXContent(XContentBuilder builder) throws IOException { } + + @Override + public boolean equals(Object obj) { + if (obj == null || getClass() != obj.getClass()) { + return false; + } + AbstractAllocateAllocationCommand other = (AbstractAllocateAllocationCommand) obj; + // Override equals and hashCode for testing + return Objects.equals(index, other.index) && + Objects.equals(shardId, other.shardId) && + Objects.equals(node, other.node); + } + + @Override + public int hashCode() { + // Override equals and hashCode for testing + return Objects.hash(index, shardId, node); + } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java index d4191292cfc..c80afde3086 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java @@ -125,7 +125,7 @@ public class AllocateEmptyPrimaryAllocationCommand extends BasePrimaryAllocation // we need to move the unassigned info back to treat it as if it was index creation unassignedInfoToUpdate = new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "force empty allocation from previous reason " + shardRouting.unassignedInfo().getReason() + ", " + 
shardRouting.unassignedInfo().getMessage(), - shardRouting.unassignedInfo().getFailure(), System.nanoTime(), System.currentTimeMillis()); + shardRouting.unassignedInfo().getFailure(), 0, System.nanoTime(), System.currentTimeMillis()); } initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting, unassignedInfoToUpdate); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java index b651580ea74..8c47deee66f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java @@ -136,6 +136,4 @@ public class AllocateReplicaAllocationCommand extends AbstractAllocateAllocation initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting); return new RerouteExplanation(this, decision); } - - } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommand.java index 736018531fa..92c1ffa9921 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommand.java @@ -22,13 +22,16 @@ package org.elasticsearch.cluster.routing.allocation.command; import org.elasticsearch.cluster.routing.allocation.RerouteExplanation; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; /** - * This interface defines the basic methods of commands for allocation + * A command to move shards in some way. + * + * Commands are registered in {@link NetworkModule}. 
*/ public interface AllocationCommand extends NamedWriteable, ToXContent { interface Parser { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommands.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommands.java index ca0eab6e33b..10ba3f55944 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommands.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommands.java @@ -20,12 +20,12 @@ package org.elasticsearch.cluster.routing.allocation.command; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.RoutingExplanations; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -33,12 +33,13 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Objects; /** * A simple {@link AllocationCommand} composite managing several * {@link AllocationCommand} implementations */ -public class AllocationCommands { +public class AllocationCommands extends ToXContentToBytes { private final List commands = new ArrayList<>(); /** @@ -171,21 +172,31 @@ public class AllocationCommands { return commands; } - /** - * Writes {@link AllocationCommands} to a {@link XContentBuilder} - * - * @param commands {@link AllocationCommands} to write - * @param builder {@link XContentBuilder} to use - * @param params Parameters to use for building - * @throws IOException if something bad happens while building the content - */ - public static void toXContent(AllocationCommands commands, XContentBuilder builder, ToXContent.Params params) throws IOException { + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startArray("commands"); - for (AllocationCommand command : commands.commands) { + for (AllocationCommand command : commands) { builder.startObject(); builder.field(command.name(), command); builder.endObject(); } builder.endArray(); + return builder; + } + + @Override + public boolean equals(Object obj) { + if (obj == null || getClass() != obj.getClass()) { + return false; + } + AllocationCommands other = (AllocationCommands) obj; + // Override equals and hashCode for testing + return Objects.equals(commands, other.commands); + } + + @Override + public int hashCode() { + // Override equals and hashCode for testing + return Objects.hashCode(commands); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/BasePrimaryAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/BasePrimaryAllocationCommand.java index 0013061e8ea..2eb3af9d4f9 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/BasePrimaryAllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/BasePrimaryAllocationCommand.java @@ -83,4 +83,18 @@ public abstract class BasePrimaryAllocationCommand extends AbstractAllocateAlloc protected void 
extraXContent(XContentBuilder builder) throws IOException { builder.field(ACCEPT_DATA_LOSS_FIELD, acceptDataLoss); } + + @Override + public boolean equals(Object obj) { + if (false == super.equals(obj)) { + return false; + } + BasePrimaryAllocationCommand other = (BasePrimaryAllocationCommand) obj; + return acceptDataLoss == other.acceptDataLoss; + } + + @Override + public int hashCode() { + return 31 * super.hashCode() + Boolean.hashCode(acceptDataLoss); + } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java index 60b0842b273..9adb3e30708 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.util.Objects; import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; @@ -240,4 +241,23 @@ public class CancelAllocationCommand implements AllocationCommand { } return new CancelAllocationCommand(index, shardId, nodeId, allowPrimary); } + + @Override + public boolean equals(Object obj) { + if (obj == null || getClass() != obj.getClass()) { + return false; + } + CancelAllocationCommand other = (CancelAllocationCommand) obj; + // Override equals and hashCode for testing + return Objects.equals(index, other.index) && + Objects.equals(shardId, other.shardId) && + Objects.equals(node, other.node) && + Objects.equals(allowPrimary, other.allowPrimary); + } + + @Override + public int hashCode() { + // Override equals and hashCode for testing + return Objects.hash(index, shardId, node, allowPrimary); + } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java index a2e1a54e515..69bd8f0eeca 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.util.Objects; /** * A command that moves a shard from a specific node to another node.
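The equals/hashCode implementations added across the allocation commands give them structural equality, which serialization and parsing round-trip tests can assert directly. A minimal sketch under that assumption (index and node names are made up):

    // Two command lists built from identical values now compare equal.
    AllocationCommands expected = new AllocationCommands(
            new MoveAllocationCommand("test", 0, "node1", "node2"));
    AllocationCommands actual = new AllocationCommands(
            new MoveAllocationCommand("test", 0, "node1", "node2"));
    assert expected.equals(actual);                  // structural equality
    assert expected.hashCode() == actual.hashCode(); // consistent with equals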
@@ -195,4 +196,23 @@ public class MoveAllocationCommand implements AllocationCommand {
         }
         return new MoveAllocationCommand(index, shardId, fromNode, toNode);
     }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+        MoveAllocationCommand other = (MoveAllocationCommand) obj;
+        // Override equals and hashCode for testing
+        return Objects.equals(index, other.index) &&
+                Objects.equals(shardId, other.shardId) &&
+                Objects.equals(fromNode, other.fromNode) &&
+                Objects.equals(toNode, other.toNode);
+    }
+
+    @Override
+    public int hashCode() {
+        // Override equals and hashCode for testing
+        return Objects.hash(index, shardId, fromNode, toNode);
+    }
 }
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java
new file mode 100644
index 00000000000..6a8a0ccc5fa
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.cluster.routing.allocation.decider;
+
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.cluster.routing.RoutingNode;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.UnassignedInfo;
+import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Settings;
+
+/**
+ * An allocation decider that prevents shards from being allocated on any node if the shard's allocation has been retried N times without
+ * success. This means if a shard has been INITIALIZING N times in a row without being moved to STARTED, the shard will be ignored until
+ * the setting for index.allocation.max_retries is raised. The default value is 5.
+ * Note: This allocation decider also allows allocation of repeatedly failing shards when the /_cluster/reroute?retry_failed=true
+ * API is manually invoked. This allows single retries without raising the limits.
+ *
+ * @see RoutingAllocation#isRetryFailed()
+ */
+public class MaxRetryAllocationDecider extends AllocationDecider {
+
+    public static final Setting<Integer> SETTING_ALLOCATION_MAX_RETRY = Setting.intSetting("index.allocation.max_retries", 5, 0,
+        Setting.Property.Dynamic, Setting.Property.IndexScope);
+
+    public static final String NAME = "max_retry";
+
+    /**
+     * Initializes a new {@link MaxRetryAllocationDecider}
+     *
+     * @param settings {@link Settings} used by this {@link AllocationDecider}
+     */
+    @Inject
+    public MaxRetryAllocationDecider(Settings settings) {
+        super(settings);
+    }
+
+    @Override
+    public Decision canAllocate(ShardRouting shardRouting, RoutingAllocation allocation) {
+        UnassignedInfo unassignedInfo = shardRouting.unassignedInfo();
+        if (unassignedInfo != null && unassignedInfo.getNumFailedAllocations() > 0) {
+            final IndexMetaData indexMetaData = allocation.metaData().getIndexSafe(shardRouting.index());
+            final int maxRetry = SETTING_ALLOCATION_MAX_RETRY.get(indexMetaData.getSettings());
+            if (allocation.isRetryFailed()) { // manual allocation - retry
+                // if we are called via the _reroute API we ignore the failure counter and try to allocate
+                // this improves the usability since people don't need to raise the limits to issue retries since a simple _reroute call is
+                // enough to manually retry.
+                return allocation.decision(Decision.YES, NAME, "shard has already failed allocating ["
+                    + unassignedInfo.getNumFailedAllocations() + "] times vs. [" + maxRetry + "] retries allowed "
+                    + unassignedInfo.toString() + " - retrying once on manual allocation");
+            } else if (unassignedInfo.getNumFailedAllocations() >= maxRetry) {
+                return allocation.decision(Decision.NO, NAME, "shard has already failed allocating ["
+                    + unassignedInfo.getNumFailedAllocations() + "] times vs. [" + maxRetry + "] retries allowed "
+                    + unassignedInfo.toString() + " - manually call [/_cluster/reroute?retry_failed=true] to retry");
+            }
+        }
+        return allocation.decision(Decision.YES, NAME, "shard has no previous failures");
+    }
+
+    @Override
+    public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
+        return canAllocate(shardRouting, allocation);
+    }
+}
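In practice this decider means a shard that has failed allocation index.allocation.max_retries times stays unassigned until either the limit is raised or a retrying reroute is issued. A sketch of both escape hatches via the Java client; here `client` is assumed to be an existing org.elasticsearch.client.Client, and `setRetryFailed` is assumed to be the Java-API counterpart of the /_cluster/reroute?retry_failed=true call referenced in the decider's messages:

    // Option 1: raise the dynamic per-index limit so the allocator tries again.
    client.admin().indices().prepareUpdateSettings("my_index")
            .setSettings(Settings.builder().put("index.allocation.max_retries", 10))
            .get();

    // Option 2: retry the failed allocations once without touching the limit
    // (REST equivalent: POST /_cluster/reroute?retry_failed=true).
    client.admin().cluster().prepareReroute()
            .setRetryFailed(true)   // assumed builder method, see lead-in
            .get();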
diff --git a/core/src/main/java/org/elasticsearch/common/Base64.java b/core/src/main/java/org/elasticsearch/common/Base64.java
deleted file mode 100644
index fa499a55d4d..00000000000
--- a/core/src/main/java/org/elasticsearch/common/Base64.java
+++ /dev/null
@@ -1,1621 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.common;
-
-import java.io.IOException;
-import java.nio.charset.Charset;
-import java.util.Locale;
-import java.util.Objects;
-
-/**
- * Encodes and decodes to and from Base64 notation.
- *
- * Homepage: http://iharder.net/base64.
- *
- * Example:
- *
- *     String encoded = Base64.encode( myByteArray );
- *     byte[] myByteArray = Base64.decode( encoded );
- *
- * The options parameter, which appears in a few places, is used to pass
- * several pieces of information to the encoder. In the "higher level" methods such as
- * encodeBytes( bytes, options ) the options parameter can be used to indicate such
- * things as first gzipping the bytes before encoding them, not inserting linefeeds,
- * and encoding using the URL-safe and Ordered dialects.
- *
- * Note, according to RFC3548, Section 2.1, implementations should not add line feeds
- * unless explicitly told to do so. I've got Base64 set to this behavior now, although
- * earlier versions broke lines by default.
- *
- * The constants defined in Base64 can be OR-ed together to combine options, so you
- * might make a call like this:
- *
- *     String encoded = Base64.encodeBytes( mybytes, Base64.GZIP | Base64.DO_BREAK_LINES );
- *
- * to compress the data before encoding it and then making the output have newline characters.
- *
- * Also...
- *
- *     String encoded = Base64.encodeBytes( crazyString.getBytes() );
- *
- * Change Log:
- *
- *   v2.3.7 - Fixed subtle bug when base 64 input stream contained the
- *     value 01111111, which is an invalid base 64 character but should not
- *     throw an ArrayIndexOutOfBoundsException either. Led to discovery of
- *     mishandling (or potential for better handling) of other bad input
- *     characters. You should now get an IOException if you try decoding
- *     something that has bad characters in it.
- *   v2.3.6 - Fixed bug when breaking lines and the final byte of the encoded
- *     string ended in the last column; the buffer was not properly shrunk and
- *     contained an extra (null) byte that made it into the string.
- *   v2.3.5 - Fixed bug in {@code #encodeFromFile} where estimated buffer size
- *     was wrong for files of size 31, 34, and 37 bytes.
- *   v2.3.4 - Fixed bug when working with gzipped streams whereby flushing
- *     the Base64.OutputStream closed the Base64 encoding (by padding with equals
- *     signs) too soon. Also added an option to suppress the automatic decoding
- *     of gzipped streams. Also added experimental support for specifying a
- *     class loader when using the
- *     {@code #decodeToObject(java.lang.String, int, java.lang.ClassLoader)} method.
- *   v2.3.3 - Changed default char encoding to US-ASCII which reduces the internal Java
- *     footprint with its CharEncoders and so forth. Fixed some javadocs that were
- *     inconsistent. Removed imports and specified things like java.io.IOException
- *     explicitly inline.
- *   v2.3.2 - Reduced memory footprint! Finally refined the "guessing" of how big the
- *     final encoded data will be so that the code doesn't have to create two output
- *     arrays: an oversized initial one and then a final, exact-sized one. Big win
- *     when using the {@link #encodeBytesToBytes(byte[])} family of methods (and not
- *     using the gzip options which uses a different mechanism with streams and stuff).
- *   v2.3.1 - Added {@link #encodeBytesToBytes(byte[], int, int, int)} and some
- *     similar helper methods to be more efficient with memory by not returning a
- *     String but just a byte array.
- *   v2.3 - This is not a drop-in replacement! This is two years of comments
- *     and bug fixes queued up and finally executed. Thanks to everyone who sent
- *     me stuff, and I'm sorry I wasn't able to distribute your fixes to everyone else.
- *     Much bad coding was cleaned up including throwing exceptions where necessary
- *     instead of returning null values or something similar. Here are some changes
- *     that may affect you:
- *       - Does not break lines, by default. This is to keep in compliance with RFC3548.
- *       - Throws exceptions instead of returning null values. Because some operations
- *         (especially those that may permit the GZIP option) use IO streams, there
- *         is a possibility of an java.io.IOException being thrown. After some discussion and
- *         thought, I've changed the behavior of the methods to throw java.io.IOExceptions
- *         rather than return null if ever there's an error. I think this is more
- *         appropriate, though it will require some changes to your code. Sorry,
- *         it should have been done this way to begin with.
- *       - Removed all references to System.out, System.err, and the like.
- *         Shame on me. All I can say is sorry they were ever there.
- *       - Throws NullPointerExceptions and IllegalArgumentExceptions as needed
- *         such as when passed arrays are null or offsets are invalid.
- *       - Cleaned up as much javadoc as I could to avoid any javadoc warnings.
- *         This was especially annoying before for people who were thorough in their
- *         own projects and then had gobs of javadoc warnings on this file.
- *   v2.2.1 - Fixed bug using URL_SAFE and ORDERED encodings. Fixed bug
- *     when using very small files (~< 40 bytes).
- *   v2.2 - Added some helper methods for encoding/decoding directly from
- *     one file to the next. Also added a main() method to support command line
- *     encoding/decoding from one file to the next. Also added these Base64 dialects:
- *       1. The default is RFC3548 format.
- *       2. Calling Base64.setFormat(Base64.BASE64_FORMAT.URLSAFE_FORMAT) generates
- *          URL and file name friendly format as described in Section 4 of RFC3548.
- *          http://www.faqs.org/rfcs/rfc3548.html
- *       3. Calling Base64.setFormat(Base64.BASE64_FORMAT.ORDERED_FORMAT) generates
- *          URL and file name friendly format that preserves lexical ordering as described
- *          in http://www.faqs.org/qa/rfcc-1940.html
- *     Special thanks to Jim Kellerman at http://www.powerset.com/
- *     for contributing the new Base64 dialects.
- *   v2.1 - Cleaned up javadoc comments and unused variables and methods. Added
- *     some convenience methods for reading and writing to and from files.
- *   v2.0.2 - Now specifies UTF-8 encoding in places where the code fails on systems
- *     with other encodings (like EBCDIC).
- *   v2.0.1 - Fixed an error when decoding a single byte, that is, when the
- *     encoded data was a single byte.
- *   v2.0 - I got rid of methods that used booleans to set options.
- *     Now everything is more consolidated and cleaner. The code now detects
- *     when data that's being decoded is gzip-compressed and will decompress it
- *     automatically. Generally things are cleaner. You'll probably have to
- *     change some method calls that you were making to support the new
- *     options format (ints that you "OR" together).
- *   v1.5.1 - Fixed bug when decompressing and decoding to a
- *     byte[] using decode( String s, boolean gzipCompressed ).
- *     Added the ability to "suspend" encoding in the Output Stream so
- *     you can turn on and off the encoding if you need to embed base64
- *     data in an otherwise "normal" stream (like an XML file).
- *   v1.5 - Output stream pases on flush() command but doesn't do anything itself.
- *     This helps when using GZIP streams.
- *     Added the ability to GZip-compress objects before encoding them.
- *   v1.4 - Added helper methods to read/write files.
- *   v1.3.6 - Fixed OutputStream.flush() so that 'position' is reset.
- *   v1.3.5 - Added flag to turn on and off line breaks. Fixed bug in input stream
- *     where last buffer being read, if not completely full, was not returned.
- *   v1.3.4 - Fixed when "improperly padded stream" error was thrown at the wrong time.
- *   v1.3.3 - Fixed I/O streams which were totally messed up.
- *
- * I am placing this code in the Public Domain. Do with it as you will. - * This software comes with no guarantees or warranties but with - * plenty of well-wishing instead! - * Please visit http://iharder.net/base64 - * periodically to check for updates or to contribute improvements. - * - * @author Robert Harder - * @author rob@iharder.net - * @version 2.3.7 - */ -public final class Base64 { - -/* ******** P U B L I C F I E L D S ******** */ - - - /** - * No options specified. Value is zero. - */ - public final static int NO_OPTIONS = 0; - - /** - * Specify encoding in first bit. Value is one. - */ - public final static int ENCODE = 1; - - - /** - * Specify decoding in first bit. Value is zero. - */ - public final static int DECODE = 0; - - - /** - * Specify that data should be gzip-compressed in second bit. Value is two. - */ - public final static int GZIP = 2; - - /** - * Specify that gzipped data should not be automatically gunzipped. - */ - public final static int DONT_GUNZIP = 4; - - - /** - * Do break lines when encoding. Value is 8. - */ - public final static int DO_BREAK_LINES = 8; - - /** - * Encode using Base64-like encoding that is URL- and Filename-safe as described - * in Section 4 of RFC3548: - * http://www.faqs.org/rfcs/rfc3548.html. - * It is important to note that data encoded this way is not officially valid Base64, - * or at the very least should not be called Base64 without also specifying that is - * was encoded using the URL- and Filename-safe dialect. - */ - public final static int URL_SAFE = 16; - - - /** - * Encode using the special "ordered" dialect of Base64 described here: - * http://www.faqs.org/qa/rfcc-1940.html. - */ - public final static int ORDERED = 32; - - -/* ******** P R I V A T E F I E L D S ******** */ - - - /** - * Maximum line length (76) of Base64 output. - */ - private final static int MAX_LINE_LENGTH = 76; - - - /** - * The equals sign (=) as a byte. - */ - private final static byte EQUALS_SIGN = (byte) '='; - - - /** - * The new line character (\n) as a byte. - */ - private final static byte NEW_LINE = (byte) '\n'; - - - /** - * Preferred encoding. - */ - public final static Charset PREFERRED_ENCODING = Charset.forName("US-ASCII"); - - - private final static byte WHITE_SPACE_ENC = -5; // Indicates white space in encoding - private final static byte EQUALS_SIGN_ENC = -1; // Indicates equals sign in encoding - - -/* ******** S T A N D A R D B A S E 6 4 A L P H A B E T ******** */ - - /** - * The 64 valid Base64 values. - */ - /* Host platform me be something funny like EBCDIC, so we hardcode these values. 
*/ - private final static byte[] _STANDARD_ALPHABET = { - (byte) 'A', (byte) 'B', (byte) 'C', (byte) 'D', (byte) 'E', (byte) 'F', (byte) 'G', - (byte) 'H', (byte) 'I', (byte) 'J', (byte) 'K', (byte) 'L', (byte) 'M', (byte) 'N', - (byte) 'O', (byte) 'P', (byte) 'Q', (byte) 'R', (byte) 'S', (byte) 'T', (byte) 'U', - (byte) 'V', (byte) 'W', (byte) 'X', (byte) 'Y', (byte) 'Z', - (byte) 'a', (byte) 'b', (byte) 'c', (byte) 'd', (byte) 'e', (byte) 'f', (byte) 'g', - (byte) 'h', (byte) 'i', (byte) 'j', (byte) 'k', (byte) 'l', (byte) 'm', (byte) 'n', - (byte) 'o', (byte) 'p', (byte) 'q', (byte) 'r', (byte) 's', (byte) 't', (byte) 'u', - (byte) 'v', (byte) 'w', (byte) 'x', (byte) 'y', (byte) 'z', - (byte) '0', (byte) '1', (byte) '2', (byte) '3', (byte) '4', (byte) '5', - (byte) '6', (byte) '7', (byte) '8', (byte) '9', (byte) '+', (byte) '/' - }; - - - /** - * Translates a Base64 value to either its 6-bit reconstruction value - * or a negative number indicating some other meaning. - */ - private final static byte[] _STANDARD_DECODABET = { - -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 0 - 8 - -5, -5, // Whitespace: Tab and Linefeed - -9, -9, // Decimal 11 - 12 - -5, // Whitespace: Carriage Return - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 14 - 26 - -9, -9, -9, -9, -9, // Decimal 27 - 31 - -5, // Whitespace: Space - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 33 - 42 - 62, // Plus sign at decimal 43 - -9, -9, -9, // Decimal 44 - 46 - 63, // Slash at decimal 47 - 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, // Numbers zero through nine - -9, -9, -9, // Decimal 58 - 60 - -1, // Equals sign at decimal 61 - -9, -9, -9, // Decimal 62 - 64 - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, // Letters 'A' through 'N' - 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, // Letters 'O' through 'Z' - -9, -9, -9, -9, -9, -9, // Decimal 91 - 96 - 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, // Letters 'a' through 'm' - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, // Letters 'n' through 'z' - -9, -9, -9, -9, -9 // Decimal 123 - 127 - , -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 128 - 139 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 140 - 152 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 153 - 165 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 166 - 178 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 179 - 191 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 192 - 204 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 205 - 217 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 218 - 230 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 231 - 243 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9 // Decimal 244 - 255 - }; - - -/* ******** U R L S A F E B A S E 6 4 A L P H A B E T ******** */ - - /** - * Used in the URL- and Filename-safe dialect described in Section 4 of RFC3548: - * http://www.faqs.org/rfcs/rfc3548.html. - * Notice that the last two bytes become "hyphen" and "underscore" instead of "plus" and "slash." 
- */ - private final static byte[] _URL_SAFE_ALPHABET = { - (byte) 'A', (byte) 'B', (byte) 'C', (byte) 'D', (byte) 'E', (byte) 'F', (byte) 'G', - (byte) 'H', (byte) 'I', (byte) 'J', (byte) 'K', (byte) 'L', (byte) 'M', (byte) 'N', - (byte) 'O', (byte) 'P', (byte) 'Q', (byte) 'R', (byte) 'S', (byte) 'T', (byte) 'U', - (byte) 'V', (byte) 'W', (byte) 'X', (byte) 'Y', (byte) 'Z', - (byte) 'a', (byte) 'b', (byte) 'c', (byte) 'd', (byte) 'e', (byte) 'f', (byte) 'g', - (byte) 'h', (byte) 'i', (byte) 'j', (byte) 'k', (byte) 'l', (byte) 'm', (byte) 'n', - (byte) 'o', (byte) 'p', (byte) 'q', (byte) 'r', (byte) 's', (byte) 't', (byte) 'u', - (byte) 'v', (byte) 'w', (byte) 'x', (byte) 'y', (byte) 'z', - (byte) '0', (byte) '1', (byte) '2', (byte) '3', (byte) '4', (byte) '5', - (byte) '6', (byte) '7', (byte) '8', (byte) '9', (byte) '-', (byte) '_' - }; - - /** - * Used in decoding URL- and Filename-safe dialects of Base64. - */ - private final static byte[] _URL_SAFE_DECODABET = { - -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 0 - 8 - -5, -5, // Whitespace: Tab and Linefeed - -9, -9, // Decimal 11 - 12 - -5, // Whitespace: Carriage Return - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 14 - 26 - -9, -9, -9, -9, -9, // Decimal 27 - 31 - -5, // Whitespace: Space - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 33 - 42 - -9, // Plus sign at decimal 43 - -9, // Decimal 44 - 62, // Minus sign at decimal 45 - -9, // Decimal 46 - -9, // Slash at decimal 47 - 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, // Numbers zero through nine - -9, -9, -9, // Decimal 58 - 60 - -1, // Equals sign at decimal 61 - -9, -9, -9, // Decimal 62 - 64 - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, // Letters 'A' through 'N' - 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, // Letters 'O' through 'Z' - -9, -9, -9, -9, // Decimal 91 - 94 - 63, // Underscore at decimal 95 - -9, // Decimal 96 - 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, // Letters 'a' through 'm' - 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, // Letters 'n' through 'z' - -9, -9, -9, -9, -9 // Decimal 123 - 127 - , -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 128 - 139 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 140 - 152 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 153 - 165 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 166 - 178 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 179 - 191 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 192 - 204 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 205 - 217 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 218 - 230 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 231 - 243 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9 // Decimal 244 - 255 - }; - - -/* ******** O R D E R E D B A S E 6 4 A L P H A B E T ******** */ - - /** - * I don't get the point of this technique, but someone requested it, - * and it is described here: - * http://www.faqs.org/qa/rfcc-1940.html. 
- */ - private final static byte[] _ORDERED_ALPHABET = { - (byte) '-', - (byte) '0', (byte) '1', (byte) '2', (byte) '3', (byte) '4', - (byte) '5', (byte) '6', (byte) '7', (byte) '8', (byte) '9', - (byte) 'A', (byte) 'B', (byte) 'C', (byte) 'D', (byte) 'E', (byte) 'F', (byte) 'G', - (byte) 'H', (byte) 'I', (byte) 'J', (byte) 'K', (byte) 'L', (byte) 'M', (byte) 'N', - (byte) 'O', (byte) 'P', (byte) 'Q', (byte) 'R', (byte) 'S', (byte) 'T', (byte) 'U', - (byte) 'V', (byte) 'W', (byte) 'X', (byte) 'Y', (byte) 'Z', - (byte) '_', - (byte) 'a', (byte) 'b', (byte) 'c', (byte) 'd', (byte) 'e', (byte) 'f', (byte) 'g', - (byte) 'h', (byte) 'i', (byte) 'j', (byte) 'k', (byte) 'l', (byte) 'm', (byte) 'n', - (byte) 'o', (byte) 'p', (byte) 'q', (byte) 'r', (byte) 's', (byte) 't', (byte) 'u', - (byte) 'v', (byte) 'w', (byte) 'x', (byte) 'y', (byte) 'z' - }; - - /** - * Used in decoding the "ordered" dialect of Base64. - */ - private final static byte[] _ORDERED_DECODABET = { - -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 0 - 8 - -5, -5, // Whitespace: Tab and Linefeed - -9, -9, // Decimal 11 - 12 - -5, // Whitespace: Carriage Return - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 14 - 26 - -9, -9, -9, -9, -9, // Decimal 27 - 31 - -5, // Whitespace: Space - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 33 - 42 - -9, // Plus sign at decimal 43 - -9, // Decimal 44 - 0, // Minus sign at decimal 45 - -9, // Decimal 46 - -9, // Slash at decimal 47 - 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, // Numbers zero through nine - -9, -9, -9, // Decimal 58 - 60 - -1, // Equals sign at decimal 61 - -9, -9, -9, // Decimal 62 - 64 - 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, // Letters 'A' through 'M' - 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, // Letters 'N' through 'Z' - -9, -9, -9, -9, // Decimal 91 - 94 - 37, // Underscore at decimal 95 - -9, // Decimal 96 - 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, // Letters 'a' through 'm' - 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, // Letters 'n' through 'z' - -9, -9, -9, -9, -9 // Decimal 123 - 127 - , -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 128 - 139 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 140 - 152 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 153 - 165 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 166 - 178 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 179 - 191 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 192 - 204 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 205 - 217 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 218 - 230 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, // Decimal 231 - 243 - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, -9 // Decimal 244 - 255 - }; - - -/* ******** D E T E R M I N E W H I C H A L H A B E T ******** */ - - - /** - * Returns one of the _SOMETHING_ALPHABET byte arrays depending on - * the options specified. - * It's possible, though silly, to specify ORDERED and URLSAFE - * in which case one of them will be picked, though there is - * no guarantee as to which one will be picked. 
- */ - private final static byte[] getAlphabet(int options) { - if ((options & URL_SAFE) == URL_SAFE) { - return _URL_SAFE_ALPHABET; - } else if ((options & ORDERED) == ORDERED) { - return _ORDERED_ALPHABET; - } else { - return _STANDARD_ALPHABET; - } - } // end getAlphabet - - - /** - * Returns one of the _SOMETHING_DECODABET byte arrays depending on - * the options specified. - * It's possible, though silly, to specify ORDERED and URL_SAFE - * in which case one of them will be picked, though there is - * no guarantee as to which one will be picked. - */ - private final static byte[] getDecodabet(int options) { - if ((options & URL_SAFE) == URL_SAFE) { - return _URL_SAFE_DECODABET; - } else if ((options & ORDERED) == ORDERED) { - return _ORDERED_DECODABET; - } else { - return _STANDARD_DECODABET; - } - } // end getAlphabet - - - /** - * Defeats instantiation. - */ - private Base64() { - } - - -/* ******** E N C O D I N G M E T H O D S ******** */ - - - /** - * Encodes up to the first three bytes of array threeBytes - * and returns a four-byte array in Base64 notation. - * The actual number of significant bytes in your array is - * given by numSigBytes. - * The array threeBytes needs only be as big as - * numSigBytes. - * Code can reuse a byte array by passing a four-byte array as b4. - * - * @param b4 A reusable byte array to reduce array instantiation - * @param threeBytes the array to convert - * @param numSigBytes the number of significant bytes in your array - * @return four byte array in Base64 notation. - * @since 1.5.1 - */ - private static byte[] encode3to4(byte[] b4, byte[] threeBytes, int numSigBytes, int options) { - encode3to4(threeBytes, 0, numSigBytes, b4, 0, options); - return b4; - } // end encode3to4 - - - /** - *
Encodes up to three bytes of the array source - * and writes the resulting four Base64 bytes to destination. - * The source and destination arrays can be manipulated - * anywhere along their length by specifying - * srcOffset and destOffset. - * This method does not check to make sure your arrays - * are large enough to accommodate srcOffset + 3 for - * the source array or destOffset + 4 for - * the destination array. - * The actual number of significant bytes in your array is - * given by numSigBytes.
- * This is the lowest level of the encoding methods with - * all possible parameters.
- * - * @param source the array to convert - * @param srcOffset the index where conversion begins - * @param numSigBytes the number of significant bytes in your array - * @param destination the array to hold the conversion - * @param destOffset the index where output will be put - * @return the destination array - * @since 1.3 - */ - private static byte[] encode3to4( - byte[] source, int srcOffset, int numSigBytes, - byte[] destination, int destOffset, int options) { - - byte[] ALPHABET = getAlphabet(options); - - // 1 2 3 - // 01234567890123456789012345678901 Bit position - // --------000000001111111122222222 Array position from threeBytes - // --------| || || || | Six bit groups to index ALPHABET - // >>18 >>12 >> 6 >> 0 Right shift necessary - // 0x3f 0x3f 0x3f Additional AND - - // Create buffer with zero-padding if there are only one or two - // significant bytes passed in the array. - // We have to shift left 24 in order to flush out the 1's that appear - // when Java treats a value as negative that is cast from a byte to an int. - int inBuff = (numSigBytes > 0 ? ((source[srcOffset] << 24) >>> 8) : 0) - | (numSigBytes > 1 ? ((source[srcOffset + 1] << 24) >>> 16) : 0) - | (numSigBytes > 2 ? ((source[srcOffset + 2] << 24) >>> 24) : 0); - - switch (numSigBytes) { - case 3: - destination[destOffset] = ALPHABET[(inBuff >>> 18)]; - destination[destOffset + 1] = ALPHABET[(inBuff >>> 12) & 0x3f]; - destination[destOffset + 2] = ALPHABET[(inBuff >>> 6) & 0x3f]; - destination[destOffset + 3] = ALPHABET[(inBuff) & 0x3f]; - return destination; - - case 2: - destination[destOffset] = ALPHABET[(inBuff >>> 18)]; - destination[destOffset + 1] = ALPHABET[(inBuff >>> 12) & 0x3f]; - destination[destOffset + 2] = ALPHABET[(inBuff >>> 6) & 0x3f]; - destination[destOffset + 3] = EQUALS_SIGN; - return destination; - - case 1: - destination[destOffset] = ALPHABET[(inBuff >>> 18)]; - destination[destOffset + 1] = ALPHABET[(inBuff >>> 12) & 0x3f]; - destination[destOffset + 2] = EQUALS_SIGN; - destination[destOffset + 3] = EQUALS_SIGN; - return destination; - - default: - return destination; - } // end switch - } // end encode3to4 - - - /** - * Performs Base64 encoding on the raw ByteBuffer, - * writing it to the encoded ByteBuffer. - * This is an experimental feature. Currently it does not - * pass along any options (such as {@link #DO_BREAK_LINES} - * or {@link #GZIP}. - * - * @param raw input buffer - * @param encoded output buffer - * @since 2.3 - */ - public static void encode(java.nio.ByteBuffer raw, java.nio.ByteBuffer encoded) { - byte[] raw3 = new byte[3]; - byte[] enc4 = new byte[4]; - - while (raw.hasRemaining()) { - int rem = Math.min(3, raw.remaining()); - raw.get(raw3, 0, rem); - Base64.encode3to4(enc4, raw3, rem, Base64.NO_OPTIONS); - encoded.put(enc4); - } // end input remaining - } - - - /** - * Performs Base64 encoding on the raw ByteBuffer, - * writing it to the encoded CharBuffer. - * This is an experimental feature. Currently it does not - * pass along any options (such as {@link #DO_BREAK_LINES} - * or {@link #GZIP}. 
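As an aside for readers tracing the bit arithmetic in encode3to4 above, here is a minimal standalone sketch of the same 3-byte-to-4-character packing; the method and variable names are illustrative, not part of the deleted class:

--------------------------------
// Pack up to three input bytes into a 24-bit buffer, then slice out
// four 6-bit indices into the 64-character alphabet.
static char[] pack3to4(byte b0, byte b1, byte b2, char[] alphabet) {
    int buff = ((b0 & 0xFF) << 16) | ((b1 & 0xFF) << 8) | (b2 & 0xFF);
    return new char[] {
        alphabet[(buff >>> 18) & 0x3F],
        alphabet[(buff >>> 12) & 0x3F],
        alphabet[(buff >>> 6)  & 0x3F],
        alphabet[buff & 0x3F]
    };
}
--------------------------------

For instance, the ASCII bytes of "Man" (77, 97, 110) form the 24-bit value 010011 010110 000101 101110, whose 6-bit slices 19, 22, 5, and 46 select "TWFu" from the standard alphabet.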
- * - * @param raw input buffer - * @param encoded output buffer - * @since 2.3 - */ - public static void encode(java.nio.ByteBuffer raw, java.nio.CharBuffer encoded) { - byte[] raw3 = new byte[3]; - byte[] enc4 = new byte[4]; - - while (raw.hasRemaining()) { - int rem = Math.min(3, raw.remaining()); - raw.get(raw3, 0, rem); - Base64.encode3to4(enc4, raw3, rem, Base64.NO_OPTIONS); - for (int i = 0; i < 4; i++) { - encoded.put((char) (enc4[i] & 0xFF)); - } - } // end input remaining - } - - - - /** - * Encodes a byte array into Base64 notation. - * Does not GZip-compress data. - * - * @param source The data to convert - * @return The data in Base64-encoded form - * @throws NullPointerException if source array is null - * @since 1.4 - */ - public static String encodeBytes(byte[] source) { - // Since we're not going to have the GZIP encoding turned on, - // we're not going to have an java.io.IOException thrown, so - // we should not force the user to have to catch it. - String encoded = null; - try { - encoded = encodeBytes(source, 0, source.length, NO_OPTIONS); - } catch (java.io.IOException ex) { - // not sure why this was an assertion before, running with assertions disabled would mean swallowing this exception - throw new IllegalStateException(ex); - } // end catch - assert encoded != null; - return encoded; - } // end encodeBytes - - - /** - * Encodes a byte array into Base64 notation. - *
- * Example options:
-     *   GZIP: gzip-compresses object before encoding it.
-     *   DO_BREAK_LINES: break lines at 76 characters
-     *     Note: Technically, this makes your encoding non-compliant.
- *
- * Example: encodeBytes( myData, Base64.GZIP ) or
- * Example: encodeBytes( myData, Base64.GZIP | Base64.DO_BREAK_LINES )
- *
- * As of v 2.3, if there is an error with the GZIP stream, - * the method will throw a java.io.IOException. This is new to v2.3! - * In earlier versions, it just returned a null value, but - * in retrospect that's a pretty poor way to handle it.
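The option flags above are plain bit flags, so they combine with bitwise OR, exactly as the javadoc examples show. A short usage sketch against this (now deleted) API; note that the overloads taking an options argument declare java.io.IOException for the GZIP path:

--------------------------------
byte[] data = "some payload".getBytes(java.nio.charset.StandardCharsets.UTF_8);
String plain   = Base64.encodeBytes(data);              // no options, no IOException
String gzipped = Base64.encodeBytes(data, Base64.GZIP); // compress, then encode
String both    = Base64.encodeBytes(data, Base64.GZIP | Base64.DO_BREAK_LINES);
--------------------------------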
- * - * @param source The data to convert - * @param options Specified options - * @return The Base64-encoded data as a String - * @throws java.io.IOException if there is an error - * @throws NullPointerException if source array is null - * @see Base64#GZIP - * @see Base64#DO_BREAK_LINES - * @since 2.0 - */ - public static String encodeBytes(byte[] source, int options) throws java.io.IOException { - return encodeBytes(source, 0, source.length, options); - } // end encodeBytes - - /** - * Encodes a byte array into Base64 notation. - * Does not GZip-compress data. - *
- * As of v 2.3, if there is an error, - * the method will throw a java.io.IOException. This is new to v2.3! - * In earlier versions, it just returned a null value, but - * in retrospect that's a pretty poor way to handle it.
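For sizing intuition (the arithmetic here is mine, though the class uses the same formula further down in encodeNonCompressedBytes): every three input bytes become four output characters, so len bytes encode to 4 * ceil(len / 3) characters before any line breaks.

--------------------------------
// Encoded length for len input bytes; mirrors the estimate used by
// encodeNonCompressedBytes below (one '\n' per full 76-character line).
static int encodedLength(int len, boolean breakLines) {
    int encLen = (len / 3) * 4 + (len % 3 > 0 ? 4 : 0);
    if (breakLines) {
        encLen += encLen / 76;
    }
    return encLen;
}
--------------------------------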
- * - * @param source The data to convert - * @param off Offset in array where conversion should begin - * @param len Length of data to convert - * @return The Base64-encoded data as a String - * @throws NullPointerException if source array is null - * @throws IllegalArgumentException if source array, offset, or length are invalid - * @since 1.4 - */ - public static String encodeBytes(byte[] source, int off, int len) { - // Since we're not going to have the GZIP encoding turned on, - // we're not going to have an java.io.IOException thrown, so - // we should not force the user to have to catch it. - String encoded = null; - try { - encoded = encodeBytes(source, off, len, NO_OPTIONS); - } catch (java.io.IOException ex) { - throw new IllegalStateException(ex); - } // end catch - assert encoded != null; - return encoded; - } // end encodeBytes - - - /** - * Encodes a byte array into Base64 notation. - *
- * Example options:
-     *   GZIP: gzip-compresses object before encoding it.
-     *   DO_BREAK_LINES: break lines at 76 characters
-     *     Note: Technically, this makes your encoding non-compliant.
- *
- * Example: encodeBytes( myData, Base64.GZIP ) or
- * Example: encodeBytes( myData, Base64.GZIP | Base64.DO_BREAK_LINES )
- *
- * As of v 2.3, if there is an error with the GZIP stream, - * the method will throw an java.io.IOException. This is new to v2.3! - * In earlier versions, it just returned a null value, but - * in retrospect that's a pretty poor way to handle it. - * - * @param source The data to convert - * @param off Offset in array where conversion should begin - * @param len Length of data to convert - * @param options Specified options - * @return The Base64-encoded data as a String - * @throws java.io.IOException if there is an error - * @throws NullPointerException if source array is null - * @throws IllegalArgumentException if source array, offset, or length are invalid - * @see Base64#GZIP - * @see Base64#DO_BREAK_LINES - * @since 2.0 - */ - public static String encodeBytes(byte[] source, int off, int len, int options) throws java.io.IOException { - byte[] encoded = encodeBytesToBytes(source, off, len, options); - - // Return value according to relevant encoding. - return new String(encoded, PREFERRED_ENCODING); - - } // end encodeBytes - - - /** - * Similar to {@link #encodeBytes(byte[])} but returns - * a byte array instead of instantiating a String. This is more efficient - * if you're working with I/O streams and have large data sets to encode. - * - * @param source The data to convert - * @return The Base64-encoded data as a byte[] (of ASCII characters) - * @throws NullPointerException if source array is null - * @since 2.3.1 - */ - public static byte[] encodeBytesToBytes(byte[] source) { - byte[] encoded = null; - try { - encoded = encodeBytesToBytes(source, 0, source.length, Base64.NO_OPTIONS); - } catch (java.io.IOException ex) { - throw new IllegalStateException("IOExceptions only come from GZipping, which is turned off: ", ex); - } - return encoded; - } - - - /** - * Similar to {@link #encodeBytes(byte[], int, int, int)} but returns - * a byte array instead of instantiating a String. This is more efficient - * if you're working with I/O streams and have large data sets to encode. - * - * @param source The data to convert - * @param off Offset in array where conversion should begin - * @param len Length of data to convert - * @param options Specified options - * @return The Base64-encoded data as a String - * @throws java.io.IOException if there is an error - * @throws NullPointerException if source array is null - * @throws IllegalArgumentException if source array, offset, or length are invalid - * @see Base64#GZIP - * @see Base64#DO_BREAK_LINES - * @since 2.3.1 - */ - public static byte[] encodeBytesToBytes(byte[] source, int off, int len, int options) throws java.io.IOException { - Objects.requireNonNull(source, "Cannot serialize a null array."); - - if (off < 0) { - throw new IllegalArgumentException("Cannot have negative offset: " + off); - } // end if: off < 0 - - if (len < 0) { - throw new IllegalArgumentException("Cannot have length offset: " + len); - } // end if: len < 0 - - if (off + len > source.length) { - throw new IllegalArgumentException( - String.format(Locale.ROOT, "Cannot have offset of %d and length of %d with array of length %d", off, len, source.length)); - } // end if: off < 0 - - // Compress? - if ((options & GZIP) != 0) { - return encodeCompressedBytes(source, off, len, options); - } // end if: compress - - // Else, don't compress. Better not to use streams at all then. 
- else { - return encodeNonCompressedBytes(source, off, len, options); - } // end else: don't compress - - } // end encodeBytesToBytes - - private static byte[] encodeNonCompressedBytes(byte[] source, int off, int len, int options) { - boolean breakLines = (options & DO_BREAK_LINES) != 0; - - //int len43 = len * 4 / 3; - //byte[] outBuff = new byte[ ( len43 ) // Main 4:3 - // + ( (len % 3) > 0 ? 4 : 0 ) // Account for padding - // + (breakLines ? ( len43 / MAX_LINE_LENGTH ) : 0) ]; // New lines - // Try to determine more precisely how big the array needs to be. - // If we get it right, we don't have to do an array copy, and - // we save a bunch of memory. - int encLen = (len / 3) * 4 + (len % 3 > 0 ? 4 : 0); // Bytes needed for actual encoding - if (breakLines) { - encLen += encLen / MAX_LINE_LENGTH; // Plus extra newline characters - } - byte[] outBuff = new byte[encLen]; - - - int d = 0; - int e = 0; - int len2 = len - 2; - int lineLength = 0; - for (; d < len2; d += 3, e += 4) { - encode3to4(source, d + off, 3, outBuff, e, options); - - lineLength += 4; - if (breakLines && lineLength >= MAX_LINE_LENGTH) { - outBuff[e + 4] = NEW_LINE; - e++; - lineLength = 0; - } // end if: end of line - } // en dfor: each piece of array - - if (d < len) { - encode3to4(source, d + off, len - d, outBuff, e, options); - e += 4; - } // end if: some padding needed - - - // Only resize array if we didn't guess it right. - if (e <= outBuff.length - 1) { - // If breaking lines and the last byte falls right at - // the line length (76 bytes per line), there will be - // one extra byte, and the array will need to be resized. - // Not too bad of an estimate on array size, I'd say. - byte[] finalOut = new byte[e]; - System.arraycopy(outBuff, 0, finalOut, 0, e); - //System.err.println("Having to resize array from " + outBuff.length + " to " + e ); - return finalOut; - } else { - //System.err.println("No need to resize array."); - return outBuff; - } - } - - private static byte[] encodeCompressedBytes(byte[] source, int off, int len, int options) throws IOException { - java.io.ByteArrayOutputStream baos = null; - java.util.zip.GZIPOutputStream gzos = null; - OutputStream b64os = null; - - try { - // GZip -> Base64 -> ByteArray - baos = new java.io.ByteArrayOutputStream(); - b64os = new OutputStream(baos, ENCODE | options); - gzos = new java.util.zip.GZIPOutputStream(b64os); - - gzos.write(source, off, len); - gzos.close(); - } // end try - catch (IOException e) { - // Catch it and then throw it immediately so that - // the finally{} block is called for cleanup. - throw e; - } // end catch - finally { - try { - gzos.close(); - } catch (Exception e) { - } - try { - b64os.close(); - } catch (Exception e) { - } - try { - baos.close(); - } catch (Exception e) { - } - } // end finally - - return baos.toByteArray(); - } - - -/* ******** D E C O D I N G M E T H O D S ******** */ - - - /** - * Decodes four bytes from array source - * and writes the resulting bytes (up to three of them) - * to destination. - * The source and destination arrays can be manipulated - * anywhere along their length by specifying - * srcOffset and destOffset. - * This method does not check to make sure your arrays - * are large enough to accommodate srcOffset + 4 for - * the source array or destOffset + 3 for - * the destination array. - * This method returns the actual number of bytes that - * were converted from the Base64 encoding. - *
This is the lowest level of the decoding methods with - * all possible parameters.
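Mirroring the encoder sketch earlier, the core of the 4-to-3 step can be written standalone; the names are illustrative, and decodabet maps an ASCII code to its 6-bit value as in the tables at the top of the class:

--------------------------------
// Reassemble a 24-bit buffer from four 6-bit values, then split it into
// three bytes. Padding ('=') shortens the result, which decode4to3 below
// handles explicitly by returning 1, 2, or 3.
static void unpack4to3(byte[] four, byte[] out, byte[] decodabet) {
    int buff = ((decodabet[four[0]] & 0xFF) << 18)
             | ((decodabet[four[1]] & 0xFF) << 12)
             | ((decodabet[four[2]] & 0xFF) << 6)
             |  (decodabet[four[3]] & 0xFF);
    out[0] = (byte) (buff >>> 16);
    out[1] = (byte) (buff >>> 8);
    out[2] = (byte) buff;
}
--------------------------------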
- * - * @param source the array to convert - * @param srcOffset the index where conversion begins - * @param destination the array to hold the conversion - * @param destOffset the index where output will be put - * @param options alphabet type is pulled from this (standard, url-safe, ordered) - * @return the number of decoded bytes converted - * @throws NullPointerException if source or destination arrays are null - * @throws IllegalArgumentException if srcOffset or destOffset are invalid - * or there is not enough room in the array. - * @since 1.3 - */ - private static int decode4to3(byte[] source, int srcOffset, byte[] destination, int destOffset, int options) { - // Lots of error checking and exception throwing - Objects.requireNonNull(source, "Source array was null."); - Objects.requireNonNull(destination, "Destination array was null."); - if (srcOffset < 0 || srcOffset + 3 >= source.length) { - throw new IllegalArgumentException(String.format(Locale.ROOT, - "Source array with length %d cannot have offset of %d and still process four bytes.", source.length, srcOffset)); - } // end if - if (destOffset < 0 || destOffset + 2 >= destination.length) { - throw new IllegalArgumentException(String.format(Locale.ROOT, - "Destination array with length %d cannot have offset of %d and still store three bytes.", destination.length, destOffset)); - } // end if - - byte[] DECODABET = getDecodabet(options); - - - // Two ways to do the same thing. Don't know which way I like best. - //int outBuff = ( ( DECODABET[ source[ srcOffset ] ] << 24 ) >>> 6 ) - // | ( ( DECODABET[ source[ srcOffset + 1] ] << 24 ) >>> 12 ); - int outBuff = ((DECODABET[source[srcOffset]] & 0xFF) << 18) - | ((DECODABET[source[srcOffset + 1]] & 0xFF) << 12); - - destination[destOffset] = (byte) (outBuff >>> 16); - - // Example: Dk== - if (source[srcOffset + 2] == EQUALS_SIGN) { - return 1; - } - - outBuff |= ((DECODABET[source[srcOffset + 2]] & 0xFF) << 6); - destination[destOffset + 1] = (byte) (outBuff >>> 8); - - // Example: DkL= - if (source[srcOffset + 3] == EQUALS_SIGN) { - return 2; - } - - outBuff |= ((DECODABET[source[srcOffset + 3]] & 0xFF)); - destination[destOffset + 2] = (byte) (outBuff); - - // Example: DkLE - return 3; - } - - - /** - * Low-level access to decoding ASCII characters in - * the form of a byte array. Ignores GUNZIP option, if - * it's set. This is not generally a recommended method, - * although it is used internally as part of the decoding process. - * Special case: if len = 0, an empty array is returned. Still, - * if you need more speed and reduced memory footprint (and aren't - * gzipping), consider this method. - * - * @param source The Base64 encoded data - * @return decoded data - * @since 2.3.1 - */ - public static byte[] decode(byte[] source) - throws java.io.IOException { - byte[] decoded = null; -// try { - decoded = decode(source, 0, source.length, Base64.NO_OPTIONS); -// } catch( java.io.IOException ex ) { -// assert false : "IOExceptions only come from GZipping, which is turned off: " + ex.getMessage(); -// } - return decoded; - } - - - /** - * Low-level access to decoding ASCII characters in - * the form of a byte array. Ignores GUNZIP option, if - * it's set. This is not generally a recommended method, - * although it is used internally as part of the decoding process. - * Special case: if len = 0, an empty array is returned. Still, - * if you need more speed and reduced memory footprint (and aren't - * gzipping), consider this method. 
- * - * @param source The Base64 encoded data - * @param off The offset of where to begin decoding - * @param len The length of characters to decode - * @param options Can specify options such as alphabet type to use - * @return decoded data - * @throws java.io.IOException If bogus characters exist in source data - * @since 1.3 - */ - public static byte[] decode(byte[] source, int off, int len, int options) throws java.io.IOException { - // Lots of error checking and exception throwing - Objects.requireNonNull(source, "Cannot decode null source array."); - if (off < 0 || off + len > source.length) { - throw new IllegalArgumentException(String.format(Locale.ROOT, - "Source array with length %d cannot have offset of %d and process %d bytes.", source.length, off, len)); - } // end if - - if (len == 0) { - return new byte[0]; - } else if (len < 4) { - throw new IllegalArgumentException( - "Base64-encoded string must have at least four characters, but length specified was " + len); - } // end if - - byte[] DECODABET = getDecodabet(options); - - int len34 = len * 3 / 4; // Estimate on array size - byte[] outBuff = new byte[len34]; // Upper limit on size of output - - int outBuffPosn = decode(source, off, len, options, DECODABET, outBuff); - - byte[] out = new byte[outBuffPosn]; - System.arraycopy(outBuff, 0, out, 0, outBuffPosn); - return out; - } // end decode - - private static int decode(byte[] source, int off, int len, int options, byte[] DECODABET, byte[] outBuff) throws IOException { - int outBuffPosn = 0; // Keep track of where we're writing - byte[] b4 = new byte[4]; // Four byte buffer from source, eliminating white space - int b4Posn = 0; // Keep track of four byte input buffer - for (int i = off; i < off + len; i++) { // Loop through source - - byte sbiDecode = DECODABET[source[i] & 0xFF]; - - // White space, Equals sign, or legit Base64 character - // Note the values such as -5 and -9 in the - // DECODABETs at the top of the file. - if (sbiDecode >= WHITE_SPACE_ENC) { - if (sbiDecode >= EQUALS_SIGN_ENC) { - b4[b4Posn++] = source[i]; // Save non-whitespace - if (b4Posn > 3) { // Time to decode? - outBuffPosn += decode4to3(b4, 0, outBuff, outBuffPosn, options); - b4Posn = 0; - - // If that was the equals sign, break out of 'for' loop - if (source[i] == EQUALS_SIGN) { - // check if the equals sign is somewhere in between - if (i+1 < len + off) { - throw new IOException(String.format(Locale.ROOT, - "Found equals sign at position %d of the base64 string, not at the end", i)); - } - break; - } // end if: equals sign - } // end if: quartet built - else { - if (source[i] == EQUALS_SIGN && len + off > i && source[i+1] != EQUALS_SIGN) { - throw new IOException(String.format(Locale.ROOT, - "Found equals sign at position %d of the base64 string, not at the end", i)); - } // enf if: equals sign and next character not as well - } // end else: - } // end if: equals sign or better - } // end if: white space, equals sign or better - else { - // There's a bad input character in the Base64 stream. - throw new IOException(String.format(Locale.ROOT, - "Bad Base64 input character decimal %d in array position %d", ((int) source[i]) & 0xFF, i)); - } // end else: - } // each input character - return outBuffPosn; - } - - - /** - * Decodes data from Base64 notation, automatically - * detecting gzip-compressed data and decompressing it. 
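The gzip auto-detection mentioned here keys off the two-byte GZIP magic number read little-endian from the freshly decoded bytes, the same check the implementation below performs. A self-contained sketch:

--------------------------------
// True if the decoded bytes start with the GZIP magic number 0x8b1f.
static boolean looksGzipped(byte[] decoded) {
    if (decoded == null || decoded.length < 2) {
        return false;
    }
    int head = (decoded[0] & 0xFF) | ((decoded[1] << 8) & 0xFF00);
    return head == java.util.zip.GZIPInputStream.GZIP_MAGIC;
}
--------------------------------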
- * - * @param s the string to decode - * @return the decoded data - * @throws java.io.IOException If there is a problem - * @since 1.4 - */ - public static byte[] decode(String s) throws java.io.IOException { - return decode(s, NO_OPTIONS); - } - - - /** - * Decodes data from Base64 notation, automatically - * detecting gzip-compressed data and decompressing it. - * - * @param s the string to decode - * @param options encode options such as URL_SAFE - * @return the decoded data - * @throws java.io.IOException if there is an error - * @throws NullPointerException if s is null - * @since 1.4 - */ - public static byte[] decode(String s, int options) throws java.io.IOException { - - if (s == null) { - throw new NullPointerException("Input string was null."); - } // end if - - byte[] bytes = s.getBytes(PREFERRED_ENCODING); - // - - // Decode - bytes = decode(bytes, 0, bytes.length, options); - - // Check to see if it's gzip-compressed - // GZIP Magic Two-Byte Number: 0x8b1f (35615) - boolean dontGunzip = (options & DONT_GUNZIP) != 0; - if ((bytes != null) && (bytes.length >= 4) && (!dontGunzip)) { - - int head = ((int) bytes[0] & 0xff) | ((bytes[1] << 8) & 0xff00); - if (java.util.zip.GZIPInputStream.GZIP_MAGIC == head) { - java.io.ByteArrayInputStream bais = null; - java.util.zip.GZIPInputStream gzis = null; - java.io.ByteArrayOutputStream baos = null; - byte[] buffer = new byte[2048]; - int length = 0; - - try { - baos = new java.io.ByteArrayOutputStream(); - bais = new java.io.ByteArrayInputStream(bytes); - gzis = new java.util.zip.GZIPInputStream(bais); - - while ((length = gzis.read(buffer)) >= 0) { - baos.write(buffer, 0, length); - } // end while: reading input - - // No error? Get new bytes. - bytes = baos.toByteArray(); - - } // end try - catch (java.io.IOException e) { - // e.printStackTrace(); - // Just return originally-decoded bytes - } // end catch - finally { - try { - baos.close(); - } catch (Exception e) { - } - try { - gzis.close(); - } catch (Exception e) { - } - try { - bais.close(); - } catch (Exception e) { - } - } // end finally - - } // end if: gzipped - } // end if: bytes.length >= 2 - - return bytes; - } // end decode - - - - /* ******** I N N E R C L A S S I N P U T S T R E A M ******** */ - - - /** - * A {@link Base64.InputStream} will read data from another - * java.io.InputStream, given in the constructor, - * and encode/decode to/from Base64 notation on the fly. - * - * @see Base64 - * @since 1.3 - */ - public static class InputStream extends java.io.FilterInputStream { - - private boolean encode; // Encoding or decoding - private int position; // Current position in the buffer - private byte[] buffer; // Small buffer holding converted data - private int bufferLength; // Length of buffer (3 or 4) - private int numSigBytes; // Number of meaningful bytes in the buffer - private int lineLength; - private boolean breakLines; // Break lines at less than 80 characters - private int options; // Record options used to create the stream. - private byte[] decodabet; // Local copies to avoid extra method calls - - - /** - * Constructs a {@link Base64.InputStream} in DECODE mode. - * - * @param in the java.io.InputStream from which to read data. - * @since 1.3 - */ - public InputStream(java.io.InputStream in) { - this(in, DECODE); - } // end constructor - - - /** - * Constructs a {@link Base64.InputStream} in - * either ENCODE or DECODE mode. - *
- * Valid options:
-         *   ENCODE or DECODE: Encode or Decode as data is read.
-         *   DO_BREAK_LINES: break lines at 76 characters
-         *     (only meaningful when encoding)
- *
- * Example: new Base64.InputStream( in, Base64.DECODE ) - * - * @param in the java.io.InputStream from which to read data. - * @param options Specified options - * @see Base64#ENCODE - * @see Base64#DECODE - * @see Base64#DO_BREAK_LINES - * @since 2.0 - */ - public InputStream(java.io.InputStream in, int options) { - - super(in); - this.options = options; // Record for later - this.breakLines = (options & DO_BREAK_LINES) > 0; - this.encode = (options & ENCODE) > 0; - this.bufferLength = encode ? 4 : 3; - this.buffer = new byte[bufferLength]; - this.position = -1; - this.lineLength = 0; - this.decodabet = getDecodabet(options); - } // end constructor - - /** - * Reads enough of the input stream to convert - * to/from Base64 and returns the next byte. - * - * @return next byte - * @since 1.3 - */ - @Override - public int read() throws java.io.IOException { - - // Do we need to get data? - if (position < 0) { - if (encode) { - byte[] b3 = new byte[3]; - int numBinaryBytes = 0; - for (int i = 0; i < 3; i++) { - int b = in.read(); - - // If end of stream, b is -1. - if (b >= 0) { - b3[i] = (byte) b; - numBinaryBytes++; - } else { - break; // out of for loop - } // end else: end of stream - - } // end for: each needed input byte - - if (numBinaryBytes > 0) { - encode3to4(b3, 0, numBinaryBytes, buffer, 0, options); - position = 0; - numSigBytes = 4; - } // end if: got data - else { - return -1; // Must be end of stream - } // end else - } // end if: encoding - - // Else decoding - else { - byte[] b4 = new byte[4]; - int i = 0; - for (i = 0; i < 4; i++) { - // Read four "meaningful" bytes: - int b = 0; - do { - b = in.read(); - } - while (b >= 0 && decodabet[b & 0x7f] <= WHITE_SPACE_ENC); - - if (b < 0) { - break; // Reads a -1 if end of stream - } // end if: end of stream - - b4[i] = (byte) b; - } // end for: each needed input byte - - if (i == 4) { - numSigBytes = decode4to3(b4, 0, buffer, 0, options); - position = 0; - } // end if: got four characters - else if (i == 0) { - return -1; - } // end else if: also padded correctly - else { - // Must have broken out from above. - throw new java.io.IOException("Improperly padded Base64 input."); - } // end - - } // end else: decode - } // end else: get data - - // Got data? - if (position >= 0) { - // End of relevant data? - if ( /*!encode &&*/ position >= numSigBytes) { - return -1; - } // end if: got data - - if (encode && breakLines && lineLength >= MAX_LINE_LENGTH) { - lineLength = 0; - return '\n'; - } // end if - else { - lineLength++; // This isn't important when decoding - // but throwing an extra "if" seems - // just as wasteful. - - int b = buffer[position++]; - - if (position >= bufferLength) { - position = -1; - } // end if: end - - return b & 0xFF; // This is how you "cast" a byte that's - // intended to be unsigned. - } // end else - } // end if: position >= 0 - - // Else error - else { - throw new java.io.IOException("Error in Base64 code reading stream."); - } // end else - } // end read - - - /** - * Calls {@link #read()} repeatedly until the end of stream - * is reached or len bytes are read. - * Returns number of bytes read into array or -1 if - * end of stream is encountered. - * - * @param dest array to hold values - * @param off offset for array - * @param len max number of bytes to read into array - * @return bytes read into array or -1 if end of stream is encountered. 
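A hedged usage sketch of the stream wrapper described above, decoding a Base64 file on the fly; the file name and the process helper are made up, and a surrounding method declaring java.io.IOException is assumed:

--------------------------------
try (java.io.InputStream in = new Base64.InputStream(
         new java.io.FileInputStream("payload.b64"), Base64.DECODE)) {
    byte[] buf = new byte[4096];
    int n;
    while ((n = in.read(buf)) != -1) {
        process(buf, n); // consume n decoded bytes (process is hypothetical)
    }
}
--------------------------------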
- * @since 1.3 - */ - @Override - public int read(byte[] dest, int off, int len) - throws java.io.IOException { - int i; - int b; - for (i = 0; i < len; i++) { - b = read(); - - if (b >= 0) { - dest[off + i] = (byte) b; - } else if (i == 0) { - return -1; - } else { - break; // Out of 'for' loop - } // Out of 'for' loop - } // end for: each byte read - return i; - } // end read - - } // end inner class InputStream - - - /* ******** I N N E R C L A S S O U T P U T S T R E A M ******** */ - - - /** - * A {@link Base64.OutputStream} will write data to another - * java.io.OutputStream, given in the constructor, - * and encode/decode to/from Base64 notation on the fly. - * - * @see Base64 - * @since 1.3 - */ - public static class OutputStream extends java.io.FilterOutputStream { - - private boolean encode; - private int position; - private byte[] buffer; - private int bufferLength; - private int lineLength; - private boolean breakLines; - private byte[] b4; // Scratch used in a few places - private boolean suspendEncoding; - private int options; // Record for later - private byte[] decodabet; // Local copies to avoid extra method calls - - /** - * Constructs a {@link Base64.OutputStream} in ENCODE mode. - * - * @param out the java.io.OutputStream to which data will be written. - * @since 1.3 - */ - public OutputStream(java.io.OutputStream out) { - this(out, ENCODE); - } // end constructor - - - /** - * Constructs a {@link Base64.OutputStream} in - * either ENCODE or DECODE mode. - *
- * Valid options:
-         *   ENCODE or DECODE: Encode or Decode as data is read.
-         *   DO_BREAK_LINES: break lines at 76 characters
-         *     (only meaningful when encoding)
- *
- * Example: new Base64.OutputStream( out, Base64.ENCODE ) - * - * @param out the java.io.OutputStream to which data will be written. - * @param options Specified options. - * @see Base64#ENCODE - * @see Base64#DECODE - * @see Base64#DO_BREAK_LINES - * @since 1.3 - */ - public OutputStream(java.io.OutputStream out, int options) { - super(out); - this.breakLines = (options & DO_BREAK_LINES) != 0; - this.encode = (options & ENCODE) != 0; - this.bufferLength = encode ? 3 : 4; - this.buffer = new byte[bufferLength]; - this.position = 0; - this.lineLength = 0; - this.suspendEncoding = false; - this.b4 = new byte[4]; - this.options = options; - this.decodabet = getDecodabet(options); - } // end constructor - - - /** - * Writes the byte to the output stream after - * converting to/from Base64 notation. - * When encoding, bytes are buffered three - * at a time before the output stream actually - * gets a write() call. - * When decoding, bytes are buffered four - * at a time. - * - * @param theByte the byte to write - * @since 1.3 - */ - @Override - public void write(int theByte) - throws java.io.IOException { - // Encoding suspended? - if (suspendEncoding) { - this.out.write(theByte); - return; - } // end if: suspended - - // Encode? - if (encode) { - buffer[position++] = (byte) theByte; - if (position >= bufferLength) { // Enough to encode. - - this.out.write(encode3to4(b4, buffer, bufferLength, options)); - - lineLength += 4; - if (breakLines && lineLength >= MAX_LINE_LENGTH) { - this.out.write(NEW_LINE); - lineLength = 0; - } // end if: end of line - - position = 0; - } // end if: enough to output - } // end if: encoding - - // Else, Decoding - else { - // Meaningful Base64 character? - if (decodabet[theByte & 0x7f] > WHITE_SPACE_ENC) { - buffer[position++] = (byte) theByte; - if (position >= bufferLength) { // Enough to output. - - int len = Base64.decode4to3(buffer, 0, b4, 0, options); - out.write(b4, 0, len); - position = 0; - } // end if: enough to output - } // end if: meaningful base64 character - else if (decodabet[theByte & 0x7f] != WHITE_SPACE_ENC) { - throw new java.io.IOException("Invalid character in Base64 data."); - } // end else: not white space either - } // end else: decoding - } // end write - - - /** - * Calls {@link #write(int)} repeatedly until len - * bytes are written. - * - * @param theBytes array from which to read bytes - * @param off offset for array - * @param len max number of bytes to read into array - * @since 1.3 - */ - @Override - public void write(byte[] theBytes, int off, int len) - throws java.io.IOException { - // Encoding suspended? - if (suspendEncoding) { - this.out.write(theBytes, off, len); - return; - } // end if: suspended - - for (int i = 0; i < len; i++) { - write(theBytes[off + i]); - } // end for: each byte written - - } // end write - - - /** - * Method added by PHIL. [Thanks, PHIL. -Rob] - * This pads the buffer without closing the stream. - * - * @throws java.io.IOException if there's an error. - */ - public void flushBase64() throws java.io.IOException { - if (position > 0) { - if (encode) { - out.write(encode3to4(b4, buffer, position, options)); - position = 0; - } // end if: encoding - else { - throw new java.io.IOException("Base64 input not properly padded."); - } // end else: decoding - } // end if: buffer partially full - - } // end flush - - - /** - * Flushes and closes (I think, in the superclass) the stream. - * - * @since 1.3 - */ - @Override - public void close() throws java.io.IOException { - // 1. 
Ensure that pending characters are written - flushBase64(); - - // 2. Actually close the stream - // Base class both flushes and closes. - super.close(); - - buffer = null; - out = null; - } // end close - - - /** - * Suspends encoding of the stream. - * May be helpful if you need to embed a piece of - * base64-encoded data in a stream. - * - * @throws java.io.IOException if there's an error flushing - * @since 1.5.1 - */ - public void suspendEncoding() throws java.io.IOException { - flushBase64(); - this.suspendEncoding = true; - } // end suspendEncoding - - - /** - * Resumes encoding of the stream. - * May be helpful if you need to embed a piece of - * base64-encoded data in a stream. - * - * @since 1.5.1 - */ - public void resumeEncoding() { - this.suspendEncoding = false; - } // end resumeEncoding - - - } // end inner class OutputStream - - -} // end class Base64 diff --git a/core/src/main/java/org/elasticsearch/common/RandomBasedUUIDGenerator.java b/core/src/main/java/org/elasticsearch/common/RandomBasedUUIDGenerator.java index 9a3c35f3527..9f5e5f34a1b 100644 --- a/core/src/main/java/org/elasticsearch/common/RandomBasedUUIDGenerator.java +++ b/core/src/main/java/org/elasticsearch/common/RandomBasedUUIDGenerator.java @@ -21,6 +21,7 @@ package org.elasticsearch.common; import java.io.IOException; +import java.util.Base64; import java.util.Random; class RandomBasedUUIDGenerator implements UUIDGenerator { @@ -54,14 +55,6 @@ class RandomBasedUUIDGenerator implements UUIDGenerator { * We set only the MSB of the variant*/ randomBytes[8] &= 0x3f; /* clear the 2 most significant bits */ randomBytes[8] |= 0x80; /* set the variant (MSB is set)*/ - try { - byte[] encoded = Base64.encodeBytesToBytes(randomBytes, 0, randomBytes.length, Base64.URL_SAFE); - // we know the bytes are 16, and not a multi of 3, so remove the 2 padding chars that are added - assert encoded[encoded.length - 1] == '='; - assert encoded[encoded.length - 2] == '='; - return new String(encoded, 0, encoded.length - 2, Base64.PREFERRED_ENCODING); - } catch (IOException e) { - throw new IllegalStateException("should not be thrown"); - } + return Base64.getUrlEncoder().withoutPadding().encodeToString(randomBytes); } } diff --git a/core/src/main/java/org/elasticsearch/common/TimeBasedUUIDGenerator.java b/core/src/main/java/org/elasticsearch/common/TimeBasedUUIDGenerator.java index d1a22a17cda..8d507ae7f22 100644 --- a/core/src/main/java/org/elasticsearch/common/TimeBasedUUIDGenerator.java +++ b/core/src/main/java/org/elasticsearch/common/TimeBasedUUIDGenerator.java @@ -19,8 +19,7 @@ package org.elasticsearch.common; - -import java.io.IOException; +import java.util.Base64; import java.util.concurrent.atomic.AtomicInteger; /** These are essentially flake ids (http://boundary.com/blog/2012/01/12/flake-a-decentralized-k-ordered-unique-id-generator-in-erlang) but @@ -80,15 +79,6 @@ class TimeBasedUUIDGenerator implements UUIDGenerator { assert 9 + SECURE_MUNGED_ADDRESS.length == uuidBytes.length; - byte[] encoded; - try { - encoded = Base64.encodeBytesToBytes(uuidBytes, 0, uuidBytes.length, Base64.URL_SAFE); - } catch (IOException e) { - throw new IllegalStateException("should not be thrown", e); - } - - // We are a multiple of 3 bytes so we should not see any padding: - assert encoded[encoded.length - 1] != '='; - return new String(encoded, 0, encoded.length, Base64.PREFERRED_ENCODING); + return Base64.getUrlEncoder().withoutPadding().encodeToString(uuidBytes); } } diff --git 
a/core/src/main/java/org/elasticsearch/common/blobstore/BlobPath.java b/core/src/main/java/org/elasticsearch/common/blobstore/BlobPath.java index 7636097e288..858486d282c 100644 --- a/core/src/main/java/org/elasticsearch/common/blobstore/BlobPath.java +++ b/core/src/main/java/org/elasticsearch/common/blobstore/BlobPath.java @@ -30,6 +30,8 @@ import java.util.List; */ public class BlobPath implements Iterable { + private static final String SEPARATOR = "/"; + private final List paths; public BlobPath() { @@ -60,15 +62,12 @@ public class BlobPath implements Iterable { return new BlobPath(Collections.unmodifiableList(paths)); } - public String buildAsString(String separator) { - StringBuilder sb = new StringBuilder(); - for (int i = 0; i < paths.size(); i++) { - sb.append(paths.get(i)); - if (i < (paths.size() - 1)) { - sb.append(separator); - } + public String buildAsString() { + String p = String.join(SEPARATOR, paths); + if (p.isEmpty()) { + return p; } - return sb.toString(); + return p + SEPARATOR; } @Override diff --git a/core/src/main/java/org/elasticsearch/common/joda/Joda.java b/core/src/main/java/org/elasticsearch/common/joda/Joda.java index b65a248c21f..cffea836ac2 100644 --- a/core/src/main/java/org/elasticsearch/common/joda/Joda.java +++ b/core/src/main/java/org/elasticsearch/common/joda/Joda.java @@ -321,20 +321,15 @@ public class Joda { public static class EpochTimeParser implements DateTimeParser { - private static final Pattern MILLI_SECOND_PRECISION_PATTERN = Pattern.compile("^-?\\d{1,13}$"); - private static final Pattern SECOND_PRECISION_PATTERN = Pattern.compile("^-?\\d{1,10}$"); - private final boolean hasMilliSecondPrecision; - private final Pattern pattern; public EpochTimeParser(boolean hasMilliSecondPrecision) { this.hasMilliSecondPrecision = hasMilliSecondPrecision; - this.pattern = hasMilliSecondPrecision ? MILLI_SECOND_PRECISION_PATTERN : SECOND_PRECISION_PATTERN; } @Override public int estimateParsedLength() { - return hasMilliSecondPrecision ? 13 : 10; + return hasMilliSecondPrecision ? 19 : 16; } @Override @@ -344,8 +339,7 @@ public class Joda { if ((isPositive && isTooLong) || // timestamps have to have UTC timezone - bucket.getZone() != DateTimeZone.UTC || - pattern.matcher(text).matches() == false) { + bucket.getZone() != DateTimeZone.UTC) { return -1; } @@ -378,7 +372,7 @@ public class Joda { @Override public int estimatePrintedLength() { - return hasMilliSecondPrecision ? 13 : 10; + return hasMilliSecondPrecision ? 
19 : 16; } @Override diff --git a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java index eba89c2e02a..c49b0364e28 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java +++ b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java @@ -19,9 +19,10 @@ package org.elasticsearch.common.logging; +import org.apache.log4j.Java9Hack; import org.apache.log4j.PropertyConfigurator; +import org.apache.lucene.util.Constants; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.bootstrap.BootstrapInfo; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.env.Environment; @@ -87,14 +88,17 @@ public class LogConfigurator { replacements.put("ttcc", "org.apache.log4j.TTCCLayout"); replacements.put("xml", "org.apache.log4j.XMLLayout"); REPLACEMENTS = unmodifiableMap(replacements); + + if (Constants.JRE_IS_MINIMUM_JAVA9) { + Java9Hack.fixLog4j(); + } } private static boolean loaded; /** * Consolidates settings and converts them into actual log4j settings, then initializes loggers and appenders. - * - * @param settings custom settings that should be applied + * @param settings custom settings that should be applied * @param resolveConfig controls whether the logging conf file should be read too or not. */ public static void configure(Settings settings, boolean resolveConfig) { @@ -109,7 +113,7 @@ public class LogConfigurator { if (resolveConfig) { resolveConfig(environment, settingsBuilder); } - settingsBuilder.putProperties("es.", BootstrapInfo.getSystemProperties()); + // add custom settings after config was added so that they are not overwritten by config settingsBuilder.put(settings); settingsBuilder.replacePropertyPlaceholders(); diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java index c0b650e555d..201e5297511 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java @@ -19,11 +19,6 @@ package org.elasticsearch.common.network; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - import org.elasticsearch.action.support.replication.ReplicationTask; import org.elasticsearch.client.transport.TransportClientNodesService; import org.elasticsearch.client.transport.support.TransportProxyClient; @@ -36,7 +31,6 @@ import org.elasticsearch.cluster.routing.allocation.command.AllocationCommandReg import org.elasticsearch.cluster.routing.allocation.command.CancelAllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; @@ -71,6 +65,12 @@ import org.elasticsearch.rest.action.admin.cluster.snapshots.restore.RestRestore import org.elasticsearch.rest.action.admin.cluster.snapshots.status.RestSnapshotsStatusAction; import org.elasticsearch.rest.action.admin.cluster.state.RestClusterStateAction; import org.elasticsearch.rest.action.admin.cluster.stats.RestClusterStatsAction; +import 
org.elasticsearch.rest.action.admin.cluster.storedscripts.RestDeleteSearchTemplateAction; +import org.elasticsearch.rest.action.admin.cluster.storedscripts.RestDeleteStoredScriptAction; +import org.elasticsearch.rest.action.admin.cluster.storedscripts.RestGetSearchTemplateAction; +import org.elasticsearch.rest.action.admin.cluster.storedscripts.RestGetStoredScriptAction; +import org.elasticsearch.rest.action.admin.cluster.storedscripts.RestPutSearchTemplateAction; +import org.elasticsearch.rest.action.admin.cluster.storedscripts.RestPutStoredScriptAction; import org.elasticsearch.rest.action.admin.cluster.tasks.RestPendingClusterTasksAction; import org.elasticsearch.rest.action.admin.indices.alias.RestIndicesAliasesAction; import org.elasticsearch.rest.action.admin.indices.alias.delete.RestIndexDeleteAliasesAction; @@ -137,19 +137,11 @@ import org.elasticsearch.rest.action.ingest.RestGetPipelineAction; import org.elasticsearch.rest.action.ingest.RestPutPipelineAction; import org.elasticsearch.rest.action.ingest.RestSimulatePipelineAction; import org.elasticsearch.rest.action.main.RestMainAction; -import org.elasticsearch.rest.action.percolate.RestMultiPercolateAction; -import org.elasticsearch.rest.action.percolate.RestPercolateAction; -import org.elasticsearch.rest.action.admin.cluster.storedscripts.RestDeleteStoredScriptAction; -import org.elasticsearch.rest.action.admin.cluster.storedscripts.RestGetStoredScriptAction; -import org.elasticsearch.rest.action.admin.cluster.storedscripts.RestPutStoredScriptAction; import org.elasticsearch.rest.action.search.RestClearScrollAction; import org.elasticsearch.rest.action.search.RestMultiSearchAction; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.rest.action.search.RestSearchScrollAction; import org.elasticsearch.rest.action.suggest.RestSuggestAction; -import org.elasticsearch.rest.action.admin.cluster.storedscripts.RestDeleteSearchTemplateAction; -import org.elasticsearch.rest.action.admin.cluster.storedscripts.RestGetSearchTemplateAction; -import org.elasticsearch.rest.action.admin.cluster.storedscripts.RestPutSearchTemplateAction; import org.elasticsearch.rest.action.termvectors.RestMultiTermVectorsAction; import org.elasticsearch.rest.action.termvectors.RestTermVectorsAction; import org.elasticsearch.rest.action.update.RestUpdateAction; @@ -159,6 +151,9 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.local.LocalTransport; import org.elasticsearch.transport.netty.NettyTransport; +import java.util.Arrays; +import java.util.List; + /** * A module to handle registering and binding all network related classes. */ @@ -250,8 +245,6 @@ public class NetworkModule extends AbstractModule { RestMultiTermVectorsAction.class, RestBulkAction.class, RestUpdateAction.class, - RestPercolateAction.class, - RestMultiPercolateAction.class, RestSearchAction.class, RestSearchScrollAction.class, @@ -404,7 +397,7 @@ public class NetworkModule extends AbstractModule { * @param commandName the names under which the command should be parsed. The {@link ParseField#getPreferredName()} is special because * it is the name under which the command's reader is registered. 
*/ - public void registerAllocationCommand(Writeable.Reader reader, AllocationCommand.Parser parser, + private void registerAllocationCommand(Writeable.Reader reader, AllocationCommand.Parser parser, ParseField commandName) { allocationCommandRegistry.register(parser, commandName); namedWriteableRegistry.register(AllocationCommand.class, commandName.getPreferredName(), reader); diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 5b6130281d4..36ee01484e6 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -375,7 +375,6 @@ public final class ClusterSettings extends AbstractScopedSettings { BaseRestHandler.MULTI_ALLOW_EXPLICIT_INDEX, ClusterName.CLUSTER_NAME_SETTING, Client.CLIENT_TYPE_SETTING_S, - InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING, EsExecutors.PROCESSORS_SETTING, ThreadContext.DEFAULT_HEADERS_SETTING, diff --git a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index 1b795239457..027100b3469 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -21,6 +21,7 @@ package org.elasticsearch.common.settings; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.gateway.PrimaryShardAllocator; @@ -35,12 +36,10 @@ import org.elasticsearch.index.engine.EngineConfig; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.FsDirectoryService; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.Store; -import org.elasticsearch.index.IndexWarmer; import org.elasticsearch.indices.IndicesRequestCache; import java.util.Arrays; @@ -59,6 +58,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings { public static final Predicate INDEX_SETTINGS_KEY_PREDICATE = (s) -> s.startsWith(IndexMetaData.INDEX_SETTING_PREFIX); public static final Set> BUILT_IN_INDEX_SETTINGS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( + MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY, IndexSettings.INDEX_TTL_DISABLE_PURGE_SETTING, IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING, IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, @@ -126,7 +126,6 @@ public final class IndexScopedSettings extends AbstractScopedSettings { FieldMapper.IGNORE_MALFORMED_SETTING, FieldMapper.COERCE_SETTING, Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING, - PercolatorQueryCache.INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING, MapperService.INDEX_MAPPER_DYNAMIC_SETTING, 
MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING, MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING, diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 1efb65c18b1..1be1fa8f3f4 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -537,6 +537,10 @@ public class Setting<T> extends ToXContentToBytes { return new Setting<>(key, fallbackSetting, Booleans::parseBooleanExact, properties); } + public static Setting<Boolean> boolSetting(String key, Function<Settings, String> defaultValueFn, Property... properties) { + return new Setting<>(key, defaultValueFn, Booleans::parseBooleanExact, properties); + } + public static Setting<ByteSizeValue> byteSizeSetting(String key, String percentage, Property... properties) { return new Setting<>(key, (s) -> percentage, (s) -> MemorySizeValue.parseBytesSizeValueOrHeapRatio(s, key), properties); } diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java index 8488ca75c73..15554e5ccaa 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -58,9 +58,11 @@ import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.TimeUnit; +import java.util.function.Function; import java.util.function.Predicate; import java.util.regex.Matcher; import java.util.regex.Pattern; +import java.util.stream.Collectors; import static org.elasticsearch.common.unit.ByteSizeValue.parseBytesSizeValue; import static org.elasticsearch.common.unit.SizeValue.parseSizeValue; @@ -942,89 +944,54 @@ public final class Settings implements ToXContent { return this; } - /** - * Puts all the properties with keys starting with the provided prefix. - * - * @param prefix The prefix to filter property key by - * @param properties The properties to put - * @return The builder - */ - public Builder putProperties(String prefix, Dictionary<Object, Object> properties) { - for (Object property : Collections.list(properties.keys())) { - String key = Objects.toString(property); - String value = Objects.toString(properties.get(property)); - if (key.startsWith(prefix)) { - map.put(key.substring(prefix.length()), value); + public Builder putProperties(Map<String, String> esSettings, Predicate<String> keyPredicate, Function<String, String> keyFunction) { + for (final Map.Entry<String, String> esSetting : esSettings.entrySet()) { + final String key = esSetting.getKey(); + if (keyPredicate.test(key)) { + map.put(keyFunction.apply(key), esSetting.getValue()); } } return this; } /** - * Puts all the properties with keys starting with the provided prefix. - * - * @param prefix The prefix to filter property key by - * @param properties The properties to put - * @return The builder - */ - public Builder putProperties(String prefix, Dictionary<Object, Object> properties, String ignorePrefix) { - for (Object property : Collections.list(properties.keys())) { - String key = Objects.toString(property); - String value = Objects.toString(properties.get(property)); - if (key.startsWith(prefix)) { - if (!key.startsWith(ignorePrefix)) { - map.put(key.substring(prefix.length()), value); - } - } - } - return this; - } - - /** - * Runs across all the settings set on this builder and replaces ${...} elements in the - * each setting value according to the following logic: - *
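The new boolSetting overload above takes a Function<Settings, String> so a default can be computed from the rest of the settings at evaluation time. A minimal sketch under assumed key names (index.example.flag and index.example.mode are invented for illustration):

    import org.elasticsearch.common.settings.Setting;
    import org.elasticsearch.common.settings.Setting.Property;
    import org.elasticsearch.common.settings.Settings;

    public final class FunctionDefaultSketch {
        // Hypothetical: the flag defaults to true only when the (equally
        // hypothetical) mode setting on the same Settings instance is "strict".
        public static final Setting<Boolean> EXAMPLE_FLAG = Setting.boolSetting(
            "index.example.flag",
            (Settings s) -> Boolean.toString("strict".equals(s.get("index.example.mode"))),
            Property.IndexScope);
    }

The function returns the default as a String because Setting runs every value, defaulted or not, through the same parser (Booleans::parseBooleanExact here).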

- * First, tries to resolve it against a System property ({@link System#getProperty(String)}), next, - * tries and resolve it against an environment variable ({@link System#getenv(String)}), and last, tries - * and replace it with another setting already set on this builder. + * Runs across all the settings set on this builder and + * replaces ${...} elements in each setting with + * another setting already set on this builder. */ public Builder replacePropertyPlaceholders() { + return replacePropertyPlaceholders(System::getenv); + } + + // visible for testing + Builder replacePropertyPlaceholders(Function getenv) { PropertyPlaceholder propertyPlaceholder = new PropertyPlaceholder("${", "}", false); PropertyPlaceholder.PlaceholderResolver placeholderResolver = new PropertyPlaceholder.PlaceholderResolver() { - @Override - public String resolvePlaceholder(String placeholderName) { - if (placeholderName.startsWith("env.")) { - // explicit env var prefix - return System.getenv(placeholderName.substring("env.".length())); - } - String value = System.getProperty(placeholderName); - if (value != null) { - return value; - } - value = System.getenv(placeholderName); - if (value != null) { - return value; - } - return map.get(placeholderName); + @Override + public String resolvePlaceholder(String placeholderName) { + final String value = getenv.apply(placeholderName); + if (value != null) { + return value; } + return map.get(placeholderName); + } - @Override - public boolean shouldIgnoreMissing(String placeholderName) { - // if its an explicit env var, we are ok with not having a value for it and treat it as optional - if (placeholderName.startsWith("env.") || placeholderName.startsWith("prompt.")) { - return true; - } - return false; - } - - @Override - public boolean shouldRemoveMissingPlaceholder(String placeholderName) { - if (placeholderName.startsWith("prompt.")) { - return false; - } + @Override + public boolean shouldIgnoreMissing(String placeholderName) { + if (placeholderName.startsWith("prompt.")) { return true; } - }; + return false; + } + + @Override + public boolean shouldRemoveMissingPlaceholder(String placeholderName) { + if (placeholderName.startsWith("prompt.")) { + return false; + } + return true; + } + }; for (Map.Entry entry : new HashMap<>(map).entrySet()) { String value = propertyPlaceholder.replacePlaceholders(entry.getKey(), entry.getValue(), placeholderResolver); // if the values exists and has length, we should maintain it in the map diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java b/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java index 15cd0e2bf63..f074a3ec09c 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java @@ -62,6 +62,24 @@ public class GatewayAllocator extends AbstractComponent { this.replicaShardAllocator = new InternalReplicaShardAllocator(settings, storeAction); } + /** + * Returns true if the given shard has an async fetch pending + */ + public boolean hasFetchPending(ShardId shardId, boolean primary) { + if (primary) { + AsyncShardFetch fetch = asyncFetchStarted.get(shardId); + if (fetch != null) { + return fetch.getNumberOfInFlightFetches() > 0; + } + } else { + AsyncShardFetch fetch = asyncFetchStore.get(shardId); + if (fetch != null) { + return fetch.getNumberOfInFlightFetches() > 0; + } + } + return false; + } + public void setReallocation(final ClusterService clusterService, final RoutingService 
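Threading the environment through as a Function<String, String> (defaulting to System::getenv) is what makes the placeholder resolution above testable. A self-contained sketch of the same pattern — not the builder's actual implementation — that resolves ${...} placeholders against an injected lookup before falling back to a local map:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.Function;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    final class PlaceholderSketch {
        private static final Pattern PLACEHOLDER = Pattern.compile("\\$\\{([^}]+)}");

        // Resolve ${name} first via the injected lookup (e.g. System::getenv),
        // then via the map itself; unresolved placeholders are left intact.
        static String resolve(String value, Map<String, String> map, Function<String, String> getenv) {
            Matcher m = PLACEHOLDER.matcher(value);
            StringBuffer sb = new StringBuffer();
            while (m.find()) {
                String name = m.group(1);
                String replacement = getenv.apply(name);
                if (replacement == null) {
                    replacement = map.get(name);
                }
                m.appendReplacement(sb, Matcher.quoteReplacement(replacement != null ? replacement : m.group()));
            }
            m.appendTail(sb);
            return sb.toString();
        }

        public static void main(String[] args) {
            Map<String, String> map = new HashMap<>();
            map.put("cluster.name", "demo");
            // In a test, pass a fake lookup instead of System::getenv.
            System.out.println(resolve("${HOME}/data/${cluster.name}", map, System::getenv));
        }
    }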
routingService) { this.routingService = routingService; clusterService.add(new ClusterStateListener() { diff --git a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java index d2e3d7f42cf..8b6e425c26a 100644 --- a/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/ReplicaShardAllocator.java @@ -108,7 +108,7 @@ public abstract class ReplicaShardAllocator extends AbstractComponent { currentNode, nodeWithHighestMatch); it.moveToUnassigned(new UnassignedInfo(UnassignedInfo.Reason.REALLOCATED_REPLICA, "existing allocation of replica to [" + currentNode + "] cancelled, sync id match found on node [" + nodeWithHighestMatch + "]", - null, allocation.getCurrentNanoTime(), System.currentTimeMillis())); + null, 0, allocation.getCurrentNanoTime(), System.currentTimeMillis())); changed = true; } } diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java index 60e3250e49d..f5e5ce91d80 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexService.java +++ b/core/src/main/java/org/elasticsearch/index/IndexService.java @@ -50,7 +50,6 @@ import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.IndexEventListener; @@ -151,11 +150,9 @@ public final class IndexService extends AbstractIndexComponent implements IndexC this.indexStore = indexStore; indexFieldData.setListener(new FieldDataCacheListener(this)); this.bitsetFilterCache = new BitsetFilterCache(indexSettings, new BitsetCacheListener(this)); - PercolatorQueryCache percolatorQueryCache = new PercolatorQueryCache(indexSettings, IndexService.this::newQueryShardContext); this.warmer = new IndexWarmer(indexSettings.getSettings(), threadPool, - bitsetFilterCache.createListener(threadPool), - percolatorQueryCache.createListener(threadPool)); - this.indexCache = new IndexCache(indexSettings, queryCache, bitsetFilterCache, percolatorQueryCache); + bitsetFilterCache.createListener(threadPool)); + this.indexCache = new IndexCache(indexSettings, queryCache, bitsetFilterCache); this.engineFactory = engineFactory; // initialize this last -- otherwise if the wrapper requires any other member to be non-null we fail with an NPE this.searcherWrapper = wrapperFactory.newWrapper(this); @@ -239,8 +236,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC } } } finally { - IOUtils.close(bitsetFilterCache, indexCache, indexFieldData, analysisService, refreshTask, fsyncTask, - cache().getPercolatorQueryCache()); + IOUtils.close(bitsetFilterCache, indexCache, indexFieldData, analysisService, refreshTask, fsyncTask); } } } @@ -443,7 +439,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC return new QueryShardContext( indexSettings, indexCache.bitsetFilterCache(), indexFieldData, mapperService(), similarityService(), nodeServicesProvider.getScriptService(), nodeServicesProvider.getIndicesQueriesRegistry(), - nodeServicesProvider.getClient(), indexCache.getPercolatorQueryCache(), indexReader, 
+ nodeServicesProvider.getClient(), indexReader, nodeServicesProvider.getClusterService().state() ); } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/CharMatcher.java b/core/src/main/java/org/elasticsearch/index/analysis/CharMatcher.java index 5584cc54df2..90c6ad3f3b4 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/CharMatcher.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/CharMatcher.java @@ -88,6 +88,7 @@ public interface CharMatcher { case Character.CURRENCY_SYMBOL: case Character.MATH_SYMBOL: case Character.OTHER_SYMBOL: + case Character.MODIFIER_SYMBOL: return true; default: return false; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzer.java b/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzer.java index f7bf44256cc..985a081ccc8 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzer.java @@ -33,13 +33,11 @@ import org.apache.lucene.analysis.util.CharArraySet; public final class FingerprintAnalyzer extends Analyzer { private final char separator; private final int maxOutputSize; - private final boolean preserveOriginal; private final CharArraySet stopWords; - public FingerprintAnalyzer(CharArraySet stopWords, char separator, int maxOutputSize, boolean preserveOriginal) { + public FingerprintAnalyzer(CharArraySet stopWords, char separator, int maxOutputSize) { this.separator = separator; this.maxOutputSize = maxOutputSize; - this.preserveOriginal = preserveOriginal; this.stopWords = stopWords; } @@ -48,7 +46,7 @@ public final class FingerprintAnalyzer extends Analyzer { final Tokenizer tokenizer = new StandardTokenizer(); TokenStream stream = tokenizer; stream = new LowerCaseFilter(stream); - stream = new ASCIIFoldingFilter(stream, preserveOriginal); + stream = new ASCIIFoldingFilter(stream, false); stream = new StopFilter(stream, stopWords); stream = new FingerprintFilter(stream, maxOutputSize, separator); return new TokenStreamComponents(tokenizer, stream); diff --git a/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzerProvider.java index 897068cbf8b..bb8a51e0969 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzerProvider.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/FingerprintAnalyzerProvider.java @@ -34,10 +34,8 @@ import org.elasticsearch.index.IndexSettings; public class FingerprintAnalyzerProvider extends AbstractIndexAnalyzerProvider { public static ParseField MAX_OUTPUT_SIZE = FingerprintTokenFilterFactory.MAX_OUTPUT_SIZE; - public static ParseField PRESERVE_ORIGINAL = ASCIIFoldingTokenFilterFactory.PRESERVE_ORIGINAL; public static int DEFAULT_MAX_OUTPUT_SIZE = FingerprintTokenFilterFactory.DEFAULT_MAX_OUTPUT_SIZE; - public static boolean DEFAULT_PRESERVE_ORIGINAL = ASCIIFoldingTokenFilterFactory.DEFAULT_PRESERVE_ORIGINAL; public static CharArraySet DEFAULT_STOP_WORDS = CharArraySet.EMPTY_SET; private final FingerprintAnalyzer analyzer; @@ -47,10 +45,9 @@ public class FingerprintAnalyzerProvider extends AbstractIndexAnalyzerProvidernewMapBuilder().putAll(segmentInfos.getUserData()).immutableMap(); // lucene calls the current generation, last generation. 
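On the CharMatcher change above: characters such as '^' (U+005E) and '`' (U+0060) fall into Unicode category Sk (MODIFIER_SYMBOL), which the symbol matcher previously missed while handling the currency, math, and other symbol categories. A quick check:

    public final class ModifierSymbolCheck {
        public static void main(String[] args) {
            // Both print true: these characters are classified as MODIFIER_SYMBOL,
            // so the symbol matcher only covers them once the new case is added.
            System.out.println(Character.getType('^') == Character.MODIFIER_SYMBOL);
            System.out.println(Character.getType('`') == Character.MODIFIER_SYMBOL);
        }
    }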
generation = segmentInfos.getLastGeneration(); - if (segmentInfos.getId() != null) { // id is only written starting with Lucene 5.0 - id = Base64.encodeBytes(segmentInfos.getId()); - } + id = Base64.getEncoder().encodeToString(segmentInfos.getId()); numDocs = Lucene.getNumDocs(segmentInfos); } diff --git a/core/src/main/java/org/elasticsearch/index/engine/Engine.java b/core/src/main/java/org/elasticsearch/index/engine/Engine.java index 69aee4ac680..d5fedcf0343 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -28,22 +28,18 @@ import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SegmentCommitInfo; -import org.apache.lucene.index.SegmentInfo; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SegmentReader; import org.apache.lucene.index.SnapshotDeletionPolicy; import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; import org.apache.lucene.search.SearcherManager; -import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.common.Base64; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.ImmutableOpenMap; @@ -65,7 +61,6 @@ import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.merge.MergeStats; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.shard.TranslogRecoveryPerformer; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.Translog; @@ -74,7 +69,7 @@ import java.io.FileNotFoundException; import java.io.IOException; import java.nio.file.NoSuchFileException; import java.util.Arrays; -import java.util.Collection; +import java.util.Base64; import java.util.Comparator; import java.util.HashMap; import java.util.List; @@ -1092,7 +1087,7 @@ public abstract class Engine implements Closeable { @Override public String toString() { - return Base64.encodeBytes(id); + return Base64.getEncoder().encodeToString(id); } public boolean idsEqual(byte[] id) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 0635a532a58..adb87929b84 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -23,7 +23,6 @@ import com.carrotsearch.hppc.ObjectHashSet; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; -import org.apache.lucene.document.FieldType; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.Version; import org.elasticsearch.common.compress.CompressedXContent; @@ -35,15 +34,12 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.mapper.Mapper.BuilderContext; import org.elasticsearch.index.mapper.object.ObjectMapper; -import 
org.elasticsearch.index.percolator.PercolatorFieldMapper; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.InvalidTypeNameException; import org.elasticsearch.indices.TypeMissingException; import org.elasticsearch.indices.mapper.MapperRegistry; -import org.elasticsearch.script.ScriptService; -import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; @@ -97,6 +93,8 @@ public class MapperService extends AbstractIndexComponent { "_uid", "_id", "_type", "_all", "_parent", "_routing", "_index", "_size", "_timestamp", "_ttl" ); + @Deprecated + public static final String PERCOLATOR_LEGACY_TYPE_NAME = ".percolator"; private final AnalysisService analysisService; @@ -269,7 +267,6 @@ public class MapperService extends AbstractIndexComponent { checkNestedFieldsLimit(fullPathObjectMappers); checkTotalFieldsLimit(objectMappers.size() + fieldMappers.size()); checkDepthLimit(fullPathObjectMappers.keySet()); - checkPercolatorFieldLimit(fieldTypes); } Set parentTypes = this.parentTypes; @@ -321,7 +318,7 @@ public class MapperService extends AbstractIndexComponent { private boolean typeNameStartsWithIllegalDot(DocumentMapper mapper) { boolean legacyIndex = getIndexSettings().getIndexVersionCreated().before(Version.V_5_0_0_alpha1); if (legacyIndex) { - return mapper.type().startsWith(".") && !PercolatorFieldMapper.LEGACY_TYPE_NAME.equals(mapper.type()); + return mapper.type().startsWith(".") && !PERCOLATOR_LEGACY_TYPE_NAME.equals(mapper.type()); } else { return mapper.type().startsWith("."); } @@ -432,25 +429,6 @@ public class MapperService extends AbstractIndexComponent { } } - /** - * We only allow upto 1 percolator field per index. - * - * Reasoning here is that the PercolatorQueryCache only supports a single document having a percolator query. - * Also specifying multiple queries per document feels like an anti pattern - */ - private void checkPercolatorFieldLimit(Iterable fieldTypes) { - List percolatorFieldTypes = new ArrayList<>(); - for (MappedFieldType fieldType : fieldTypes) { - if (fieldType instanceof PercolatorFieldMapper.PercolatorFieldType) { - percolatorFieldTypes.add(fieldType.name()); - } - } - if (percolatorFieldTypes.size() > 1) { - throw new IllegalArgumentException("Up to one percolator field type is allowed per index, " + - "found the following percolator fields [" + percolatorFieldTypes + "]"); - } - } - public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException { return documentParser.parse(mappingType, mappingSource, applyDefault ? 
defaultMappingSource : null); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java index 1ca67a8cbc0..ff6d943b72a 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java @@ -26,8 +26,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.ByteArrayDataOutput; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.Base64; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; @@ -45,6 +43,7 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardException; import java.io.IOException; +import java.util.Base64; import java.util.List; import java.util.Map; @@ -124,11 +123,7 @@ public class BinaryFieldMapper extends FieldMapper { } else if (value instanceof byte[]) { bytes = new BytesArray((byte[]) value); } else { - try { - bytes = new BytesArray(Base64.decode(value.toString())); - } catch (IOException e) { - throw new ElasticsearchParseException("failed to convert bytes", e); - } + bytes = new BytesArray(Base64.getDecoder().decode(value.toString())); } return bytes; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java index 408641fb67e..f56b04c92aa 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java @@ -85,7 +85,7 @@ public class IndexFieldMapper extends MetadataFieldMapper { public static class TypeParser implements MetadataFieldMapper.TypeParser { @Override public MetadataFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0)) { + if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha3)) { throw new MapperParsingException(NAME + " is not configurable"); } return new Builder(parserContext.mapperService().fullName(NAME)); diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCache.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCache.java deleted file mode 100644 index e7ca6c3d427..00000000000 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCache.java +++ /dev/null @@ -1,294 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
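The BinaryFieldMapper change earlier in this hunk is part of the same sweep replacing the custom org.elasticsearch.common.Base64 helper with the JDK's java.util.Base64; the checked IOException (and the ElasticsearchParseException wrapper) disappears because the JDK decoder throws an unchecked IllegalArgumentException on malformed input. The round trip is simply:

    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    public final class Base64RoundTrip {
        public static void main(String[] args) {
            byte[] raw = "binary-field-value".getBytes(StandardCharsets.UTF_8);
            String encoded = Base64.getEncoder().encodeToString(raw);  // no IOException
            byte[] decoded = Base64.getDecoder().decode(encoded);      // IllegalArgumentException if malformed
            System.out.println(encoded + " -> " + new String(decoded, StandardCharsets.UTF_8));
        }
    }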
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.percolator; - -import com.carrotsearch.hppc.IntObjectHashMap; -import org.apache.lucene.index.BinaryDocValues; -import org.apache.lucene.index.FieldInfo; -import org.apache.lucene.index.LeafReader; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.PostingsEnum; -import org.apache.lucene.index.StoredFieldVisitor; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.Weight; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.cache.Cache; -import org.elasticsearch.common.cache.CacheBuilder; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.XContent; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.AbstractIndexComponent; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.IndexWarmer; -import org.elasticsearch.index.IndexWarmer.TerminationHandle; -import org.elasticsearch.index.engine.Engine.Searcher; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.internal.SourceFieldMapper; -import org.elasticsearch.index.mapper.internal.TypeFieldMapper; -import org.elasticsearch.index.query.PercolateQuery; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.shard.ShardUtils; -import org.elasticsearch.threadpool.ThreadPool; - -import java.io.Closeable; -import java.io.IOException; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Executor; -import java.util.function.Supplier; - -import static org.elasticsearch.index.percolator.PercolatorFieldMapper.LEGACY_TYPE_NAME; -import static org.elasticsearch.index.percolator.PercolatorFieldMapper.PercolatorFieldType; -import static org.elasticsearch.index.percolator.PercolatorFieldMapper.parseQuery; - -public final class PercolatorQueryCache extends AbstractIndexComponent - implements Closeable, LeafReader.CoreClosedListener, PercolateQuery.QueryRegistry { - - public final static Setting INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING = - Setting.boolSetting("index.percolator.map_unmapped_fields_as_string", false, Setting.Property.IndexScope); - - public final static XContentType QUERY_BUILDER_CONTENT_TYPE = XContentType.SMILE; - - private final Supplier queryShardContextSupplier; - private final Cache cache; - private final boolean mapUnmappedFieldsAsString; - - public PercolatorQueryCache(IndexSettings indexSettings, Supplier queryShardContextSupplier) { - super(indexSettings); - this.queryShardContextSupplier = queryShardContextSupplier; - cache = CacheBuilder.builder().build(); - this.mapUnmappedFieldsAsString = 
indexSettings.getValue(INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING); - } - - @Override - public Leaf getQueries(LeafReaderContext ctx) { - QueriesLeaf percolatorQueries = cache.get(ctx.reader().getCoreCacheKey()); - if (percolatorQueries == null) { - throw new IllegalStateException("queries not loaded, queries should be have been preloaded during index warming..."); - } - return percolatorQueries; - } - - public IndexWarmer.Listener createListener(ThreadPool threadPool) { - return new IndexWarmer.Listener() { - - final Executor executor = threadPool.executor(ThreadPool.Names.WARMER); - - @Override - public TerminationHandle warmReader(IndexShard indexShard, Searcher searcher) { - final CountDownLatch latch = new CountDownLatch(searcher.reader().leaves().size()); - for (final LeafReaderContext ctx : searcher.reader().leaves()) { - if (cache.get(ctx.reader().getCoreCacheKey()) != null) { - latch.countDown(); - continue; - } - executor.execute(() -> { - try { - final long start = System.nanoTime(); - QueriesLeaf queries = loadQueries(ctx, indexShard); - cache.put(ctx.reader().getCoreCacheKey(), queries); - if (indexShard.warmerService().logger().isTraceEnabled()) { - indexShard.warmerService().logger().trace( - "loading percolator queries took [{}]", - TimeValue.timeValueNanos(System.nanoTime() - start) - ); - } - } catch (Throwable t) { - indexShard.warmerService().logger().warn("failed to load percolator queries", t); - } finally { - latch.countDown(); - } - }); - } - return () -> latch.await(); - } - }; - } - - QueriesLeaf loadQueries(LeafReaderContext context, IndexShard indexShard) throws IOException { - Version indexVersionCreated = indexShard.indexSettings().getIndexVersionCreated(); - MapperService mapperService = indexShard.mapperService(); - LeafReader leafReader = context.reader(); - ShardId shardId = ShardUtils.extractShardId(leafReader); - if (shardId == null) { - throw new IllegalStateException("can't resolve shard id"); - } - if (indexSettings.getIndex().equals(shardId.getIndex()) == false) { - // percolator cache insanity - String message = "Trying to load queries for index " + shardId.getIndex() + " with cache of index " + - indexSettings.getIndex(); - throw new IllegalStateException(message); - } - - IntObjectHashMap queries = new IntObjectHashMap<>(); - boolean legacyLoading = indexVersionCreated.before(Version.V_5_0_0_alpha1); - if (legacyLoading) { - PostingsEnum postings = leafReader.postings(new Term(TypeFieldMapper.NAME, LEGACY_TYPE_NAME), PostingsEnum.NONE); - if (postings != null) { - LegacyQueryFieldVisitor visitor = new LegacyQueryFieldVisitor(); - for (int docId = postings.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = postings.nextDoc()) { - leafReader.document(docId, visitor); - queries.put(docId, parseLegacyPercolatorDocument(docId, visitor.source)); - visitor.source = null; // reset - } - } - } else { - // Each type can have one percolator field mapper, - // So for each type we check if there is a percolator field mapper - // and parse all the queries for the documents of that type. 
- IndexSearcher indexSearcher = new IndexSearcher(leafReader); - for (DocumentMapper documentMapper : mapperService.docMappers(false)) { - Weight queryWeight = indexSearcher.createNormalizedWeight(documentMapper.typeFilter(), false); - for (FieldMapper fieldMapper : documentMapper.mappers()) { - if (fieldMapper instanceof PercolatorFieldMapper) { - PercolatorFieldType fieldType = (PercolatorFieldType) fieldMapper.fieldType(); - BinaryDocValues binaryDocValues = leafReader.getBinaryDocValues(fieldType.getQueryBuilderFieldName()); - if (binaryDocValues != null) { - // use the same leaf reader context the indexSearcher is using too: - Scorer scorer = queryWeight.scorer(leafReader.getContext()); - if (scorer != null) { - DocIdSetIterator iterator = scorer.iterator(); - for (int docId = iterator.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = iterator.nextDoc()) { - BytesRef qbSource = binaryDocValues.get(docId); - if (qbSource.length > 0) { - queries.put(docId, parseQueryBuilder(docId, qbSource)); - } - } - } - } - break; - } - } - } - } - leafReader.addCoreClosedListener(this); - return new QueriesLeaf(shardId, queries); - } - - private Query parseQueryBuilder(int docId, BytesRef qbSource) { - XContent xContent = QUERY_BUILDER_CONTENT_TYPE.xContent(); - try (XContentParser sourceParser = xContent.createParser(qbSource.bytes, qbSource.offset, qbSource.length)) { - QueryShardContext context = queryShardContextSupplier.get(); - return parseQuery(context, mapUnmappedFieldsAsString, sourceParser); - } catch (IOException e) { - throw new PercolatorException(index(), "failed to parse query builder for document [" + docId + "]", e); - } - } - - private Query parseLegacyPercolatorDocument(int docId, BytesReference source) { - try (XContentParser sourceParser = XContentHelper.createParser(source)) { - String currentFieldName = null; - XContentParser.Token token = sourceParser.nextToken(); // move the START_OBJECT - if (token != XContentParser.Token.START_OBJECT) { - throw new ElasticsearchException("failed to parse query [" + docId + "], not starting with OBJECT"); - } - while ((token = sourceParser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = sourceParser.currentName(); - } else if (token == XContentParser.Token.START_OBJECT) { - if ("query".equals(currentFieldName)) { - QueryShardContext context = queryShardContextSupplier.get(); - return parseQuery(context, mapUnmappedFieldsAsString, sourceParser); - } else { - sourceParser.skipChildren(); - } - } else if (token == XContentParser.Token.START_ARRAY) { - sourceParser.skipChildren(); - } - } - } catch (Exception e) { - throw new PercolatorException(index(), "failed to parse query [" + docId + "]", e); - } - return null; - } - - public PercolatorQueryCacheStats getStats(ShardId shardId) { - int numberOfQueries = 0; - for (QueriesLeaf queries : cache.values()) { - if (shardId.equals(queries.shardId)) { - numberOfQueries += queries.queries.size(); - } - } - return new PercolatorQueryCacheStats(numberOfQueries); - } - - @Override - public void onClose(Object cacheKey) throws IOException { - cache.invalidate(cacheKey); - } - - @Override - public void close() throws IOException { - cache.invalidateAll(); - } - - final static class LegacyQueryFieldVisitor extends StoredFieldVisitor { - - private BytesArray source; - - @Override - public void binaryField(FieldInfo fieldInfo, byte[] bytes) throws IOException { - source = new BytesArray(bytes); - } - - @Override - public 
Status needsField(FieldInfo fieldInfo) throws IOException { - if (source != null) { - return Status.STOP; - } - if (SourceFieldMapper.NAME.equals(fieldInfo.name)) { - return Status.YES; - } else { - return Status.NO; - } - } - - } - - final static class QueriesLeaf implements Leaf { - - final ShardId shardId; - final IntObjectHashMap queries; - - QueriesLeaf(ShardId shardId, IntObjectHashMap queries) { - this.shardId = shardId; - this.queries = queries; - } - - @Override - public Query getQuery(int docId) { - return queries.get(docId); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCacheStats.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCacheStats.java deleted file mode 100644 index cbc21286e20..00000000000 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueryCacheStats.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.percolator; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; - -import java.io.IOException; - -/** - * Exposes percolator query cache statistics. - */ -public class PercolatorQueryCacheStats implements Streamable, ToXContent { - - private long numQueries; - - /** - * Noop constructor for serialization purposes. - */ - public PercolatorQueryCacheStats() { - } - - PercolatorQueryCacheStats(long numQueries) { - this.numQueries = numQueries; - } - - /** - * @return The total number of loaded percolate queries. 
- */ - public long getNumQueries() { - return numQueries; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(Fields.PERCOLATOR); - builder.field(Fields.QUERIES, getNumQueries()); - builder.endObject(); - return builder; - } - - public void add(PercolatorQueryCacheStats percolate) { - if (percolate == null) { - return; - } - - numQueries += percolate.getNumQueries(); - } - - static final class Fields { - static final String PERCOLATOR = "percolator"; - static final String QUERIES = "num_queries"; - } - - public static PercolatorQueryCacheStats readPercolateStats(StreamInput in) throws IOException { - PercolatorQueryCacheStats stats = new PercolatorQueryCacheStats(); - stats.readFrom(in); - return stats; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - numQueries = in.readVLong(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeVLong(numQueries); - } -} diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java index 7afbecad4d3..ea22461243c 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoDistanceRangeQueryBuilder.java @@ -340,7 +340,7 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder { */ public MatchQueryBuilder prefixLength(int prefixLength) { if (prefixLength < 0 ) { - throw new IllegalArgumentException("No negative prefix length allowed."); + throw new IllegalArgumentException("[" + NAME + "] requires prefix length to be non-negative."); } this.prefixLength = prefixLength; return this; @@ -284,8 +284,8 @@ public class MatchQueryBuilder extends AbstractQueryBuilder { * When using fuzzy or prefix type query, the number of term expansions to use. 
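The MatchQueryBuilder validation above now rejects bad arguments at builder time and names the query in the message (the maxExpansions hunk just below tightens its check from negative to non-positive). A usage sketch:

    import org.elasticsearch.index.query.MatchQueryBuilder;

    public final class MatchValidationSketch {
        public static void main(String[] args) {
            MatchQueryBuilder match = new MatchQueryBuilder("title", "elasticsearch");
            match.prefixLength(2);       // fine: non-negative
            try {
                match.prefixLength(-1);  // rejected before the query is ever sent
            } catch (IllegalArgumentException e) {
                System.out.println(e.getMessage()); // [match] requires prefix length to be non-negative.
            }
        }
    }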
*/ public MatchQueryBuilder maxExpansions(int maxExpansions) { - if (maxExpansions < 0 ) { - throw new IllegalArgumentException("No negative maxExpansions allowed."); + if (maxExpansions <= 0 ) { + throw new IllegalArgumentException("[" + NAME + "] requires maxExpansions to be positive."); } this.maxExpansions = maxExpansions; return this; diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java index 756be4c3032..d048f73381e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java @@ -839,24 +839,6 @@ public abstract class QueryBuilders { return new ExistsQueryBuilder(name); } - public static PercolateQueryBuilder percolateQuery(String queryField, String documentType, BytesReference document) { - return new PercolateQueryBuilder(queryField, documentType, document); - } - - public static PercolateQueryBuilder percolateQuery(String queryField, String documentType, String indexedDocumentIndex, - String indexedDocumentType, String indexedDocumentId) { - return new PercolateQueryBuilder(queryField, documentType, indexedDocumentIndex, indexedDocumentType, indexedDocumentId, - null, null, null); - } - - public static PercolateQueryBuilder percolateQuery(String queryField, String documentType, String indexedDocumentIndex, - String indexedDocumentType, String indexedDocumentId, - String indexedDocumentRouting, String indexedDocumentPreference, - Long indexedDocumentVersion) { - return new PercolateQueryBuilder(queryField, documentType, indexedDocumentIndex, indexedDocumentType, indexedDocumentId, - indexedDocumentRouting, indexedDocumentPreference, indexedDocumentVersion); - } - private QueryBuilders() { } diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java index 4aa72728bc8..56a75a69138 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java @@ -51,7 +51,6 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.core.TextFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; -import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.support.NestedScope; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.query.IndicesQueriesRegistry; @@ -82,7 +81,6 @@ public class QueryShardContext extends QueryRewriteContext { private final Map namedQueries = new HashMap<>(); private final MapperQueryParser queryParser = new MapperQueryParser(this); private final IndicesQueriesRegistry indicesQueriesRegistry; - private final PercolatorQueryCache percolatorQueryCache; private boolean allowUnmappedFields; private boolean mapUnmappedFieldAsString; private NestedScope nestedScope; @@ -90,7 +88,7 @@ public class QueryShardContext extends QueryRewriteContext { public QueryShardContext(IndexSettings indexSettings, BitsetFilterCache bitsetFilterCache, IndexFieldDataService indexFieldDataService, MapperService mapperService, SimilarityService similarityService, ScriptService scriptService, - final IndicesQueriesRegistry indicesQueriesRegistry, Client client, PercolatorQueryCache percolatorQueryCache, + final IndicesQueriesRegistry 
indicesQueriesRegistry, Client client, IndexReader reader, ClusterState clusterState) { super(indexSettings, mapperService, scriptService, indicesQueriesRegistry, client, reader, clusterState); this.indexSettings = indexSettings; @@ -100,14 +98,13 @@ public class QueryShardContext extends QueryRewriteContext { this.indexFieldDataService = indexFieldDataService; this.allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields(); this.indicesQueriesRegistry = indicesQueriesRegistry; - this.percolatorQueryCache = percolatorQueryCache; this.nestedScope = new NestedScope(); } public QueryShardContext(QueryShardContext source) { this(source.indexSettings, source.bitsetFilterCache, source.indexFieldDataService, source.mapperService, source.similarityService, source.scriptService, source.indicesQueriesRegistry, source.client, - source.percolatorQueryCache, source.reader, source.clusterState); + source.reader, source.clusterState); this.types = source.getTypes(); } @@ -123,10 +120,6 @@ public class QueryShardContext extends QueryRewriteContext { return mapperService.analysisService(); } - public PercolatorQueryCache getPercolatorQueryCache() { - return percolatorQueryCache; - } - public Similarity getSearchSimilarity() { return similarityService != null ? similarityService.similarity(mapperService) : null; } @@ -178,7 +171,12 @@ public class QueryShardContext extends QueryRewriteContext { return isFilter; } - void setIsFilter(boolean isFilter) { + /** + * Public for testing only! + * + * Sets whether we are currently parsing a filter or a query + */ + public void setIsFilter(boolean isFilter) { this.isFilter = isFilter; } diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java index a503b708633..cebc72c077b 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Objects; @@ -64,7 +65,7 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder clauses() { - return this.clauses; + return Collections.unmodifiableList(this.clauses); } /** @@ -198,7 +202,7 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder public SpanOrQueryBuilder(SpanQueryBuilder initialClause) { if (initialClause == null) { - throw new IllegalArgumentException("query must include at least one clause"); + throw new IllegalArgumentException("[" + NAME + "] must include at least one clause"); } clauses.add(initialClause); } @@ -68,9 +69,12 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder writeQueries(out, clauses); } - public SpanOrQueryBuilder clause(SpanQueryBuilder clause) { + /** + * Add a span clause to the current list of clauses + */ + public SpanOrQueryBuilder addClause(SpanQueryBuilder clause) { if (clause == null) { - throw new IllegalArgumentException("inner bool query clause cannot be null"); + throw new IllegalArgumentException("[" + NAME + "] inner clause cannot be null"); } clauses.add(clause); return this; @@ -80,7 +84,7 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder * @return the {@link SpanQueryBuilder} clauses that were set for this query */ public List clauses() { - return this.clauses; + return Collections.unmodifiableList(this.clauses); } 
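With both span builders now handing out unmodifiable views, all mutation goes through the builder's addClause (the rename from clause is applied at the remaining call site in the next hunk). For example:

    import org.elasticsearch.index.query.SpanOrQueryBuilder;
    import org.elasticsearch.index.query.SpanTermQueryBuilder;

    public final class SpanOrSketch {
        public static void main(String[] args) {
            SpanOrQueryBuilder or = new SpanOrQueryBuilder(new SpanTermQueryBuilder("body", "quick"));
            or.addClause(new SpanTermQueryBuilder("body", "fast"));
            // clauses() is an unmodifiable view; adding through it now throws
            // UnsupportedOperationException instead of silently mutating the query:
            // or.clauses().add(new SpanTermQueryBuilder("body", "rapid"));
            System.out.println(or.clauses().size()); // 2
        }
    }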
@Override @@ -137,7 +141,7 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder<SpanOrQueryBuilder> SpanOrQueryBuilder queryBuilder = new SpanOrQueryBuilder(clauses.get(0)); for (int i = 1; i < clauses.size(); i++) { - queryBuilder.clause(clauses.get(i)); + queryBuilder.addClause(clauses.get(i)); } queryBuilder.boost(boost); queryBuilder.queryName(queryName); diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 768db935308..2cadc1ad24b 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -576,16 +576,22 @@ public class IndexShard extends AbstractIndexShardComponent { long bytes = getEngine().getIndexBufferRAMBytesUsed(); writingBytes.addAndGet(bytes); try { - logger.debug("refresh with source [{}] indexBufferRAMBytesUsed [{}]", source, new ByteSizeValue(bytes)); + if (logger.isTraceEnabled()) { + logger.trace("refresh with source [{}] indexBufferRAMBytesUsed [{}]", source, new ByteSizeValue(bytes)); + } long time = System.nanoTime(); getEngine().refresh(source); refreshMetric.inc(System.nanoTime() - time); } finally { - logger.debug("remove [{}] writing bytes for shard [{}]", new ByteSizeValue(bytes), shardId()); + if (logger.isTraceEnabled()) { + logger.trace("remove [{}] writing bytes for shard [{}]", new ByteSizeValue(bytes), shardId()); + } writingBytes.addAndGet(-bytes); } } else { - logger.debug("refresh with source [{}]", source); + if (logger.isTraceEnabled()) { + logger.trace("refresh with source [{}]", source); + } long time = System.nanoTime(); getEngine().refresh(source); refreshMetric.inc(System.nanoTime() - time); diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index 84278fa92b3..0109995f80f 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -39,7 +39,6 @@ import java.nio.file.OpenOption; import java.nio.file.Path; import java.nio.file.StandardOpenOption; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.locks.ReentrantLock; public class TranslogWriter extends BaseTranslogReader implements Closeable { @@ -154,7 +153,9 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { /** * returns true if there are buffered ops */ - public boolean syncNeeded() { return totalOffset != lastSyncedOffset; } + public boolean syncNeeded() { + return totalOffset != lastSyncedOffset; + } @Override public int totalOperations() { @@ -169,40 +170,55 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { /** * closes this writer and transfers its underlying file channel to a new immutable reader */ - public synchronized TranslogReader closeIntoReader() throws IOException { - try { - sync(); // sync before we close..
- } catch (IOException e) { - closeWithTragicEvent(e); - throw e; - } - if (closed.compareAndSet(false, true)) { - boolean success = false; - try { - final TranslogReader reader = new TranslogReader(generation, channel, path, firstOperationOffset, getWrittenOffset(), operationCounter); - success = true; - return reader; - } finally { - if (success == false) { - // close the channel, as we are closed and failed to create a new reader - IOUtils.closeWhileHandlingException(channel); + public TranslogReader closeIntoReader() throws IOException { + // make sure to acquire the sync lock first, to prevent deadlocks with threads calling + // syncUpTo(), where the sync lock is acquired first, followed by synchronized(this) + // + // Note: While this is not strictly needed as this method is called while blocking all ops on the translog, + // we do this for correctness and to prevent future issues. + synchronized (syncLock) { + synchronized (this) { + try { + sync(); // sync before we close.. + } catch (IOException e) { + closeWithTragicEvent(e); + throw e; + } + if (closed.compareAndSet(false, true)) { + boolean success = false; + try { + final TranslogReader reader = new TranslogReader(generation, channel, path, firstOperationOffset, getWrittenOffset(), operationCounter); + success = true; + return reader; + } finally { + if (success == false) { + // close the channel, as we are closed and failed to create a new reader + IOUtils.closeWhileHandlingException(channel); + } + } + } else { + throw new AlreadyClosedException("translog [" + getGeneration() + "] is already closed (path [" + path + "]", tragedy); } } - } else { - throw new AlreadyClosedException("translog [" + getGeneration() + "] is already closed (path [" + path + "]", tragedy); } } @Override - public synchronized Translog.Snapshot newSnapshot() { - ensureOpen(); - try { - sync(); - } catch (IOException e) { - throw new TranslogException(shardId, "exception while syncing before creating a snapshot", e); + public Translog.Snapshot newSnapshot() { + // make sure to acquire the sync lock first, to prevent deadlocks with threads calling + // syncUpTo(), where the sync lock is acquired first, followed by synchronized(this) + synchronized (syncLock) { + synchronized (this) { + ensureOpen(); + try { + sync(); + } catch (IOException e) { + throw new TranslogException(shardId, "exception while syncing before creating a snapshot", e); + } + return super.newSnapshot(); + } } - return super.newSnapshot(); } private long getWrittenOffset() throws IOException { diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java index 1ff198c6cbf..8cedea931c0 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java @@ -34,7 +34,6 @@ import org.elasticsearch.index.mapper.core.KeywordFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.core.TextFieldMapper; import org.elasticsearch.index.mapper.core.TokenCountFieldMapper; -import org.elasticsearch.index.mapper.core.LegacyTokenCountFieldMapper; import org.elasticsearch.index.mapper.core.NumberFieldMapper; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper; @@ -52,7 +51,6 @@ import org.elasticsearch.index.mapper.internal.UidFieldMapper; import
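The invariant behind the TranslogWriter change above is a single global lock order: syncLock before the instance monitor, on every path. A stripped-down sketch of that ordering (not the real class, which also deals with channels and tragic events):

    final class LockOrderSketch {
        private final Object syncLock = new Object();

        void syncUpTo() {                  // writer threads
            synchronized (syncLock) {      // 1. syncLock first
                synchronized (this) {      // 2. then the instance monitor
                    // flush buffered operations to the channel...
                }
            }
        }

        void closeIntoReader() {           // the close path takes the same order,
            synchronized (syncLock) {      // so it can never deadlock against
                synchronized (this) {      // a concurrent syncUpTo()
                    // sync, then hand the channel to an immutable reader
                }
            }
        }
    }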
org.elasticsearch.index.mapper.internal.VersionFieldMapper; import org.elasticsearch.index.mapper.ip.IpFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; -import org.elasticsearch.index.percolator.PercolatorFieldMapper; import org.elasticsearch.indices.cluster.IndicesClusterStateService; import org.elasticsearch.indices.flush.SyncedFlushService; import org.elasticsearch.indices.mapper.MapperRegistry; @@ -98,7 +96,6 @@ public class IndicesModule extends AbstractModule { registerMapper(ObjectMapper.NESTED_CONTENT_TYPE, new ObjectMapper.TypeParser()); registerMapper(CompletionFieldMapper.CONTENT_TYPE, new CompletionFieldMapper.TypeParser()); registerMapper(GeoPointFieldMapper.CONTENT_TYPE, new GeoPointFieldMapper.TypeParser()); - registerMapper(PercolatorFieldMapper.CONTENT_TYPE, new PercolatorFieldMapper.TypeParser()); if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { registerMapper(GeoShapeFieldMapper.CONTENT_TYPE, new GeoShapeFieldMapper.TypeParser()); diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index 3b9b186ddd8..6b1d24e47f1 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -280,7 +280,7 @@ public class IndicesService extends AbstractLifecycleComponent i if (indexShard.routingEntry() == null) { continue; } - IndexShardStats indexShardStats = new IndexShardStats(indexShard.shardId(), new ShardStats[] { new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesQueryCache, indexService.cache().getPercolatorQueryCache(), indexShard, flags), indexShard.commitStats()) }); + IndexShardStats indexShardStats = new IndexShardStats(indexShard.shardId(), new ShardStats[] { new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesQueryCache, indexShard, flags), indexShard.commitStats()) }); if (!statsByShard.containsKey(indexService.index())) { statsByShard.put(indexService.index(), arrayAsArrayList(indexShardStats)); } else { diff --git a/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java b/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java index f4736f0332e..f205cdd8562 100644 --- a/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java +++ b/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java @@ -37,7 +37,6 @@ import org.elasticsearch.index.flush.FlushStats; import org.elasticsearch.index.get.GetStats; import org.elasticsearch.index.shard.IndexingStats; import org.elasticsearch.index.merge.MergeStats; -import org.elasticsearch.index.percolator.PercolatorQueryCacheStats; import org.elasticsearch.index.recovery.RecoveryStats; import org.elasticsearch.index.refresh.RefreshStats; import org.elasticsearch.index.search.stats.SearchStats; @@ -102,11 +101,6 @@ public class NodeIndicesStats implements Streamable, ToXContent { return stats.getSearch(); } - @Nullable - public PercolatorQueryCacheStats getPercolate() { - return stats.getPercolatorCache(); - } - @Nullable public MergeStats getMerge() { return stats.getMerge(); diff --git a/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java b/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java index 16b3aa10a22..fb10f8a1927 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java +++ 
b/core/src/main/java/org/elasticsearch/ingest/core/CompoundProcessor.java @@ -20,14 +20,13 @@ package org.elasticsearch.ingest.core; -import org.elasticsearch.common.util.iterable.Iterables; +import org.elasticsearch.ElasticsearchException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.stream.Collectors; /** @@ -94,30 +93,38 @@ public class CompoundProcessor implements Processor { try { processor.execute(ingestDocument); } catch (Exception e) { + ElasticsearchException compoundProcessorException = newCompoundProcessorException(e, processor.getType(), processor.getTag()); if (onFailureProcessors.isEmpty()) { - throw e; + throw compoundProcessorException; } else { - executeOnFailure(ingestDocument, e, processor.getType(), processor.getTag()); + executeOnFailure(ingestDocument, compoundProcessorException); } - break; } } } - void executeOnFailure(IngestDocument ingestDocument, Exception cause, String failedProcessorType, String failedProcessorTag) throws Exception { + void executeOnFailure(IngestDocument ingestDocument, ElasticsearchException exception) throws Exception { try { - putFailureMetadata(ingestDocument, cause, failedProcessorType, failedProcessorTag); + putFailureMetadata(ingestDocument, exception); for (Processor processor : onFailureProcessors) { - processor.execute(ingestDocument); + try { + processor.execute(ingestDocument); + } catch (Exception e) { + throw newCompoundProcessorException(e, processor.getType(), processor.getTag()); + } } } finally { removeFailureMetadata(ingestDocument); } } - private void putFailureMetadata(IngestDocument ingestDocument, Exception cause, String failedProcessorType, String failedProcessorTag) { + private void putFailureMetadata(IngestDocument ingestDocument, ElasticsearchException cause) { + List processorTypeHeader = cause.getHeader("processor_type"); + List processorTagHeader = cause.getHeader("processor_tag"); + String failedProcessorType = (processorTypeHeader != null) ? processorTypeHeader.get(0) : null; + String failedProcessorTag = (processorTagHeader != null) ? 
processorTagHeader.get(0) : null; Map ingestMetadata = ingestDocument.getIngestMetadata(); - ingestMetadata.put(ON_FAILURE_MESSAGE_FIELD, cause.getMessage()); + ingestMetadata.put(ON_FAILURE_MESSAGE_FIELD, cause.getRootCause().getMessage()); ingestMetadata.put(ON_FAILURE_PROCESSOR_TYPE_FIELD, failedProcessorType); ingestMetadata.put(ON_FAILURE_PROCESSOR_TAG_FIELD, failedProcessorTag); } @@ -128,4 +135,21 @@ public class CompoundProcessor implements Processor { ingestMetadata.remove(ON_FAILURE_PROCESSOR_TYPE_FIELD); ingestMetadata.remove(ON_FAILURE_PROCESSOR_TAG_FIELD); } + + private ElasticsearchException newCompoundProcessorException(Exception e, String processorType, String processorTag) { + if (e instanceof ElasticsearchException && ((ElasticsearchException)e).getHeader("processor_type") != null) { + return (ElasticsearchException) e; + } + + ElasticsearchException exception = new ElasticsearchException(new IllegalArgumentException(e)); + + if (processorType != null) { + exception.addHeader("processor_type", processorType); + } + if (processorTag != null) { + exception.addHeader("processor_tag", processorTag); + } + + return exception; + } } diff --git a/core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java b/core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java index 8be307d776c..8b1f02607d8 100644 --- a/core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java +++ b/core/src/main/java/org/elasticsearch/ingest/core/IngestDocument.java @@ -19,7 +19,6 @@ package org.elasticsearch.ingest.core; -import org.elasticsearch.common.Base64; import org.elasticsearch.common.Strings; import org.elasticsearch.index.mapper.internal.IdFieldMapper; import org.elasticsearch.index.mapper.internal.IndexFieldMapper; @@ -30,11 +29,11 @@ import org.elasticsearch.index.mapper.internal.TTLFieldMapper; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; -import java.io.IOException; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; +import java.util.Base64; import java.util.Date; import java.util.HashMap; import java.util.List; @@ -43,8 +42,6 @@ import java.util.Map; import java.util.Objects; import java.util.TimeZone; -import static java.nio.charset.StandardCharsets.UTF_8; - /** * Represents a single document being captured before indexing and holds the source and metadata (like id, type and index). 
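The CompoundProcessor hunk above replaces the extra (type, tag) method parameters with headers carried on a wrapping ElasticsearchException, so the failing processor's identity travels with the exception itself and the on_failure path can read it back out. Below is a minimal, self-contained sketch of that pattern; TaggedException is a hypothetical stand-in for ElasticsearchException's header support, not the real class.

--------------------------------------------------
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/** Hypothetical stand-in for ElasticsearchException's header support. */
class TaggedException extends RuntimeException {
    private final Map<String, List<String>> headers = new HashMap<>();

    TaggedException(Exception cause) { super(cause); }

    void addHeader(String key, String value) {
        headers.computeIfAbsent(key, k -> new ArrayList<>()).add(value);
    }

    List<String> getHeader(String key) { return headers.get(key); }
}

public class FailureMetadataDemo {
    /** Wrap once: if the exception already carries processor metadata, pass it through. */
    static TaggedException wrap(Exception e, String type, String tag) {
        if (e instanceof TaggedException && ((TaggedException) e).getHeader("processor_type") != null) {
            return (TaggedException) e;
        }
        TaggedException wrapped = new TaggedException(e);
        if (type != null) wrapped.addHeader("processor_type", type);
        if (tag != null) wrapped.addHeader("processor_tag", tag);
        return wrapped;
    }

    public static void main(String[] args) {
        try {
            throw wrap(new IllegalStateException("boom"), "set", "my-tag");
        } catch (TaggedException e) {
            // The on_failure handler reads the metadata back out of the headers,
            // just like putFailureMetadata does in the diff above.
            List<String> types = e.getHeader("processor_type");
            System.out.println("failed processor type: " + (types != null ? types.get(0) : null));
        }
    }
}
--------------------------------------------------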
*/ @@ -144,11 +141,7 @@ public final class IngestDocument { if (object instanceof byte[]) { return (byte[]) object; } else if (object instanceof String) { - try { - return Base64.decode(object.toString().getBytes(UTF_8)); - } catch (IOException e) { - throw new IllegalArgumentException("Could not base64 decode path [ " + path + "]", e); - } + return Base64.getDecoder().decode(object.toString()); } else { throw new IllegalArgumentException("Content field [" + path + "] of unknown type [" + object.getClass().getName() + "], must be string or byte array"); @@ -464,7 +457,6 @@ public final class IngestDocument { private static void appendValues(List list, Object value) { if (value instanceof List) { - @SuppressWarnings("unchecked") List valueList = (List) value; valueList.stream().forEach(list::add); } else { diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index d543d116c8f..ec222210b4e 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -19,6 +19,7 @@ package org.elasticsearch.node; +import org.apache.lucene.util.Constants; import org.apache.lucene.util.IOUtils; import org.elasticsearch.Build; import org.elasticsearch.ElasticsearchException; @@ -168,7 +169,20 @@ public class Node implements Closeable { ESLogger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(tmpSettings)); final String displayVersion = version + (Build.CURRENT.isSnapshot() ? "-SNAPSHOT" : ""); - logger.info("version[{}], pid[{}], build[{}/{}]", displayVersion, JvmInfo.jvmInfo().pid(), Build.CURRENT.shortHash(), Build.CURRENT.date()); + final JvmInfo jvmInfo = JvmInfo.jvmInfo(); + logger.info( + "version[{}], pid[{}], build[{}/{}], OS[{}/{}/{}], JVM[{}/{}/{}/{}]", + displayVersion, + jvmInfo.pid(), + Build.CURRENT.shortHash(), + Build.CURRENT.date(), + Constants.OS_NAME, + Constants.OS_VERSION, + Constants.OS_ARCH, + Constants.JVM_VENDOR, + Constants.JVM_NAME, + Constants.JAVA_VERSION, + Constants.JVM_VERSION); logger.info("initializing ..."); diff --git a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java index 124c8b2dbdd..24a9cf589bd 100644 --- a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java +++ b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java @@ -19,14 +19,11 @@ package org.elasticsearch.node.internal; -import org.elasticsearch.bootstrap.BootstrapInfo; +import org.elasticsearch.cli.Terminal; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; -import org.elasticsearch.cli.Terminal; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.env.Environment; @@ -39,10 +36,13 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Function; +import java.util.function.Predicate; import static org.elasticsearch.common.Strings.cleanPath; @@ -52,20 +52,18 @@ import 
static org.elasticsearch.common.Strings.cleanPath; public class InternalSettingsPreparer { private static final String[] ALLOWED_SUFFIXES = {".yml", ".yaml", ".json", ".properties"}; - static final String PROPERTY_PREFIX = "es."; - static final String PROPERTY_DEFAULTS_PREFIX = "es.default."; + static final String PROPERTY_DEFAULTS_PREFIX = "default."; + static final Predicate PROPERTY_DEFAULTS_PREDICATE = key -> key.startsWith(PROPERTY_DEFAULTS_PREFIX); public static final String SECRET_PROMPT_VALUE = "${prompt.secret}"; public static final String TEXT_PROMPT_VALUE = "${prompt.text}"; - public static final Setting IGNORE_SYSTEM_PROPERTIES_SETTING = - Setting.boolSetting("config.ignore_system_properties", false, Property.NodeScope); /** * Prepares the settings by gathering all elasticsearch system properties and setting defaults. */ public static Settings prepareSettings(Settings input) { Settings.Builder output = Settings.builder(); - initializeSettings(output, input, true); + initializeSettings(output, input, true, Collections.emptyMap()); finalizeSettings(output, null, null); return output.build(); } @@ -80,9 +78,23 @@ public class InternalSettingsPreparer { * @return the {@link Settings} and {@link Environment} as a {@link Tuple} */ public static Environment prepareEnvironment(Settings input, Terminal terminal) { + return prepareEnvironment(input, terminal, Collections.emptyMap()); + } + + /** + * Prepares the settings by gathering all elasticsearch system properties, optionally loading the configuration settings, + * and then replacing all property placeholders. If a {@link Terminal} is provided and configuration settings are loaded, + * settings with a value of ${prompt.text} or ${prompt.secret} will result in a prompt for + * the setting to the user. + * @param input The custom settings to use. These are not overwritten by settings in the configuration file. + * @param terminal the Terminal to use for input/output + * @param properties Map of properties key/value pairs (usually from the command-line) + * @return the {@link Settings} and {@link Environment} as a {@link Tuple} + */ + public static Environment prepareEnvironment(Settings input, Terminal terminal, Map properties) { // just create enough settings to build the environment, to get the config dir Settings.Builder output = Settings.builder(); - initializeSettings(output, input, true); + initializeSettings(output, input, true, properties); Environment environment = new Environment(output.build()); boolean settingsFileFound = false; @@ -103,7 +115,7 @@ public class InternalSettingsPreparer { // re-initialize settings now that the config file has been loaded // TODO: only re-initialize if a config file was actually loaded - initializeSettings(output, input, false); + initializeSettings(output, input, false, properties); finalizeSettings(output, terminal, environment.configFile()); environment = new Environment(output.build()); @@ -113,22 +125,16 @@ public class InternalSettingsPreparer { return new Environment(output.build()); } - private static boolean useSystemProperties(Settings input) { - return !IGNORE_SYSTEM_PROPERTIES_SETTING.get(input); - } - /** * Initializes the builder with the given input settings, and loads system properties settings if allowed. * If loadDefaults is true, system property default settings are loaded. 
*/ - private static void initializeSettings(Settings.Builder output, Settings input, boolean loadDefaults) { + private static void initializeSettings(Settings.Builder output, Settings input, boolean loadDefaults, Map esSettings) { output.put(input); - if (useSystemProperties(input)) { - if (loadDefaults) { - output.putProperties(PROPERTY_DEFAULTS_PREFIX, BootstrapInfo.getSystemProperties()); - } - output.putProperties(PROPERTY_PREFIX, BootstrapInfo.getSystemProperties(), PROPERTY_DEFAULTS_PREFIX); + if (loadDefaults) { + output.putProperties(esSettings, PROPERTY_DEFAULTS_PREDICATE, key -> key.substring(PROPERTY_DEFAULTS_PREFIX.length())); } + output.putProperties(esSettings, PROPERTY_DEFAULTS_PREDICATE.negate(), Function.identity()); output.replacePropertyPlaceholders(); } diff --git a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java index e81d376f3b5..1b98128b85e 100644 --- a/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java @@ -27,11 +27,14 @@ import org.elasticsearch.Version; import org.elasticsearch.bootstrap.JarHell; import org.elasticsearch.cli.Command; import org.elasticsearch.cli.ExitCodes; +import org.elasticsearch.cli.SettingCommand; import org.elasticsearch.cli.Terminal; import org.elasticsearch.cli.UserError; import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.io.FileSystemUtils; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.node.internal.InternalSettingsPreparer; import java.io.BufferedReader; import java.io.IOException; @@ -56,6 +59,7 @@ import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.zip.ZipEntry; @@ -95,7 +99,7 @@ import static org.elasticsearch.common.util.set.Sets.newHashSet; * elasticsearch config directory, using the name of the plugin. If any files to be installed * already exist, they will be skipped. 
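initializeSettings now receives the raw key/value map (typically gathered from the command line) and splits it with a predicate: keys under the "default." prefix are applied first with the prefix stripped, then the remaining keys are applied as-is and overwrite any default. A dependency-free sketch of that two-pass application follows; putProperties here is a simplified stand-in for the Settings.Builder method of the same name.

--------------------------------------------------
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Predicate;

public class DefaultsThenOverridesDemo {
    static final String DEFAULTS_PREFIX = "default.";
    static final Predicate<String> IS_DEFAULT = key -> key.startsWith(DEFAULTS_PREFIX);

    /** Apply entries matching the predicate, rewriting keys with the supplied function. */
    static void putProperties(Map<String, String> out, Map<String, String> in,
                              Predicate<String> filter, Function<String, String> keyFn) {
        in.forEach((k, v) -> { if (filter.test(k)) out.put(keyFn.apply(k), v); });
    }

    public static void main(String[] args) {
        Map<String, String> esSettings = new LinkedHashMap<>();
        esSettings.put("default.path.home", "/opt/es"); // default: used only if nothing else sets it
        esSettings.put("path.home", "/data/es");        // explicit value wins

        Map<String, String> output = new LinkedHashMap<>();
        // Pass 1: defaults first, with the "default." prefix stripped ...
        putProperties(output, esSettings, IS_DEFAULT, k -> k.substring(DEFAULTS_PREFIX.length()));
        // Pass 2: ... then explicit settings, keys unchanged, overwriting any default.
        putProperties(output, esSettings, IS_DEFAULT.negate(), Function.identity());

        System.out.println(output); // {path.home=/data/es}
    }
}
--------------------------------------------------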
*/ -class InstallPluginCommand extends Command { +class InstallPluginCommand extends SettingCommand { private static final String PROPERTY_SUPPORT_STAGING_URLS = "es.plugins.staging"; @@ -114,7 +118,6 @@ class InstallPluginCommand extends Command { "analysis-phonetic", "analysis-smartcn", "analysis-stempel", - "delete-by-query", "discovery-azure", "discovery-ec2", "discovery-gce", @@ -132,7 +135,6 @@ class InstallPluginCommand extends Command { "store-smb", "x-pack"))); - private final Environment env; private final OptionSpec batchOption; private final OptionSpec arguments; @@ -160,9 +162,8 @@ class InstallPluginCommand extends Command { FILE_PERMS = Collections.unmodifiableSet(filePerms); } - InstallPluginCommand(Environment env) { + InstallPluginCommand() { super("Install a plugin"); - this.env = env; this.batchOption = parser.acceptsAll(Arrays.asList("b", "batch"), "Enable batch mode explicitly, automatic confirmation of security permission"); this.arguments = parser.nonOptions("plugin id"); @@ -178,7 +179,7 @@ class InstallPluginCommand extends Command { } @Override - protected void execute(Terminal terminal, OptionSet options) throws Exception { + protected void execute(Terminal terminal, OptionSet options, Map settings) throws Exception { // TODO: in jopt-simple 5.0 we can enforce a min/max number of positional args List args = arguments.values(options); if (args.size() != 1) { @@ -186,12 +187,12 @@ class InstallPluginCommand extends Command { } String pluginId = args.get(0); boolean isBatch = options.has(batchOption) || System.console() == null; - execute(terminal, pluginId, isBatch); + execute(terminal, pluginId, isBatch, settings); } // pkg private for testing - void execute(Terminal terminal, String pluginId, boolean isBatch) throws Exception { - + void execute(Terminal terminal, String pluginId, boolean isBatch, Map settings) throws Exception { + final Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings); // TODO: remove this leniency!! is it needed anymore? if (Files.exists(env.pluginsFile()) == false) { terminal.println("Plugins directory [" + env.pluginsFile() + "] does not exist. Creating..."); @@ -200,7 +201,7 @@ class InstallPluginCommand extends Command { Path pluginZip = download(terminal, pluginId, env.tmpFile()); Path extractedZip = unzip(pluginZip, env.pluginsFile()); - install(terminal, isBatch, extractedZip); + install(terminal, isBatch, extractedZip, env); } /** Downloads the plugin and returns the file it was downloaded to. */ @@ -349,7 +350,7 @@ class InstallPluginCommand extends Command { } /** Load information about the plugin, and verify it can be installed with no errors. */ - private PluginInfo verify(Terminal terminal, Path pluginRoot, boolean isBatch) throws Exception { + private PluginInfo verify(Terminal terminal, Path pluginRoot, boolean isBatch, Environment env) throws Exception { // read and validate the plugin descriptor PluginInfo info = PluginInfo.readFromProperties(pluginRoot); terminal.println(VERBOSE, info.toString()); @@ -398,12 +399,12 @@ class InstallPluginCommand extends Command { * Installs the plugin from {@code tmpRoot} into the plugins dir. * If the plugin has a bin dir and/or a config dir, those are copied. 
*/ - private void install(Terminal terminal, boolean isBatch, Path tmpRoot) throws Exception { + private void install(Terminal terminal, boolean isBatch, Path tmpRoot, Environment env) throws Exception { List deleteOnFailure = new ArrayList<>(); deleteOnFailure.add(tmpRoot); try { - PluginInfo info = verify(terminal, tmpRoot, isBatch); + PluginInfo info = verify(terminal, tmpRoot, isBatch, env); final Path destination = env.pluginsFile().resolve(info.getName()); if (Files.exists(destination)) { diff --git a/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java b/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java index c03e70ad4da..bd2f853bac0 100644 --- a/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/ListPluginsCommand.java @@ -19,6 +19,13 @@ package org.elasticsearch.plugins; +import joptsimple.OptionSet; +import org.elasticsearch.cli.SettingCommand; +import org.elasticsearch.cli.Terminal; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.node.internal.InternalSettingsPreparer; + import java.io.IOException; import java.nio.file.DirectoryStream; import java.nio.file.Files; @@ -26,26 +33,20 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.Collections; import java.util.List; - -import joptsimple.OptionSet; -import org.elasticsearch.cli.Command; -import org.elasticsearch.cli.Terminal; -import org.elasticsearch.env.Environment; +import java.util.Map; /** * A command for the plugin cli to list plugins installed in elasticsearch. */ -class ListPluginsCommand extends Command { +class ListPluginsCommand extends SettingCommand { - private final Environment env; - - ListPluginsCommand(Environment env) { + ListPluginsCommand() { super("Lists installed elasticsearch plugins"); - this.env = env; } @Override - protected void execute(Terminal terminal, OptionSet options) throws Exception { + protected void execute(Terminal terminal, OptionSet options, Map settings) throws Exception { + final Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings); if (Files.exists(env.pluginsFile()) == false) { throw new IOException("Plugins directory missing: " + env.pluginsFile()); } diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginCli.java b/core/src/main/java/org/elasticsearch/plugins/PluginCli.java index be06ea7db1c..3a88c4d0083 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginCli.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginCli.java @@ -26,21 +26,24 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.node.internal.InternalSettingsPreparer; +import java.util.Collections; + /** * A cli tool for adding, removing and listing plugins for elasticsearch. 
*/ public class PluginCli extends MultiCommand { - public PluginCli(Environment env) { + public PluginCli() { super("A tool for managing installed elasticsearch plugins"); - subcommands.put("list", new ListPluginsCommand(env)); - subcommands.put("install", new InstallPluginCommand(env)); - subcommands.put("remove", new RemovePluginCommand(env)); + subcommands.put("list", new ListPluginsCommand()); + subcommands.put("install", new InstallPluginCommand()); + subcommands.put("remove", new RemovePluginCommand()); } public static void main(String[] args) throws Exception { // initialize default for es.logger.level because we will not read the logging.yml String loggerLevel = System.getProperty("es.logger.level", "INFO"); + String pathHome = System.getProperty("es.path.home"); // Set the appender for all potential log files to terminal so that other components that use the logger print out the // same terminal. // The reason for this is that the plugin cli cannot be configured with a file appender because when the plugin command is @@ -48,12 +51,14 @@ public class PluginCli extends MultiCommand { // is run as service then the logs should be at /var/log/elasticsearch but when started from the tar they should be at es.home/logs. // Therefore we print to Terminal. Environment loggingEnvironment = InternalSettingsPreparer.prepareEnvironment(Settings.builder() + .put("path.home", pathHome) .put("appender.terminal.type", "terminal") - .put("rootLogger", "${es.logger.level}, terminal") - .put("es.logger.level", loggerLevel) + .put("rootLogger", "${logger.level}, terminal") + .put("logger.level", loggerLevel) .build(), Terminal.DEFAULT); LogConfigurator.configure(loggingEnvironment.settings(), false); - Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, Terminal.DEFAULT); - exit(new PluginCli(env).main(args, Terminal.DEFAULT)); + + exit(new PluginCli().main(args, Terminal.DEFAULT)); } + } diff --git a/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java b/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java index a3e6c375f83..af48c1d8207 100644 --- a/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java +++ b/core/src/main/java/org/elasticsearch/plugins/RemovePluginCommand.java @@ -24,45 +24,49 @@ import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.List; +import java.util.Map; import joptsimple.OptionSet; import joptsimple.OptionSpec; import org.apache.lucene.util.IOUtils; import org.elasticsearch.cli.Command; import org.elasticsearch.cli.ExitCodes; +import org.elasticsearch.cli.SettingCommand; import org.elasticsearch.cli.UserError; import org.elasticsearch.common.Strings; import org.elasticsearch.cli.Terminal; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.node.internal.InternalSettingsPreparer; import static org.elasticsearch.cli.Terminal.Verbosity.VERBOSE; /** * A command for the plugin cli to remove a plugin from elasticsearch. 
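The plugin commands above all switch from receiving a fully constructed Environment in their constructors to extending SettingCommand and building the Environment lazily inside execute(...), from a plain settings map. A toy sketch of that shape, assuming nothing from the real cli package; the class names and the key=value argument syntax below are illustrative only.

--------------------------------------------------
import java.util.LinkedHashMap;
import java.util.Map;

/** Illustrative base class: parses "key=value" args into a settings map. */
abstract class SettingCommandSketch {
    final void run(String[] args) throws Exception {
        Map<String, String> settings = new LinkedHashMap<>();
        for (String arg : args) {
            int eq = arg.indexOf('=');
            if (eq > 0) settings.put(arg.substring(0, eq), arg.substring(eq + 1));
        }
        execute(settings); // subclasses see only the parsed settings map
    }

    protected abstract void execute(Map<String, String> settings) throws Exception;
}

/** A "list plugins"-shaped command: resolves its environment only when executed. */
class ListCommandSketch extends SettingCommandSketch {
    @Override
    protected void execute(Map<String, String> settings) {
        // In the real command this is InternalSettingsPreparer.prepareEnvironment(...);
        // here we resolve just the one path the command needs.
        String home = settings.getOrDefault("path.home", "/usr/share/elasticsearch");
        System.out.println("would list plugins under " + home + "/plugins");
    }
}

public class SettingCommandDemo {
    public static void main(String[] args) throws Exception {
        new ListCommandSketch().run(new String[] {"path.home=/opt/es"});
    }
}
--------------------------------------------------

Deferring the Environment to execute time means argument parsing and help output never touch the filesystem, and each subcommand resolves path.home with exactly the settings it was given.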
*/ -class RemovePluginCommand extends Command { +class RemovePluginCommand extends SettingCommand { - private final Environment env; private final OptionSpec arguments; - RemovePluginCommand(Environment env) { + RemovePluginCommand() { super("Removes a plugin from elasticsearch"); - this.env = env; this.arguments = parser.nonOptions("plugin name"); } @Override - protected void execute(Terminal terminal, OptionSet options) throws Exception { + protected void execute(Terminal terminal, OptionSet options, Map settings) throws Exception { // TODO: in jopt-simple 5.0 we can enforce a min/max number of positional args List args = arguments.values(options); if (args.size() != 1) { throw new UserError(ExitCodes.USAGE, "Must supply a single plugin id argument"); } - execute(terminal, args.get(0)); + execute(terminal, args.get(0), settings); } // pkg private for testing - void execute(Terminal terminal, String pluginName) throws Exception { + void execute(Terminal terminal, String pluginName, Map settings) throws Exception { + final Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings); + terminal.println("-> Removing " + Strings.coalesceToEmpty(pluginName) + "..."); Path pluginDir = env.pluginsFile().resolve(pluginName); diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreFormat.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreFormat.java index 2061c235e63..23d390dfcfe 100644 --- a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreFormat.java +++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreFormat.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.xcontent.FromXContentBuilder; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.snapshots.SnapshotInfo; import java.io.IOException; import java.util.HashMap; @@ -51,6 +52,8 @@ public abstract class BlobStoreFormat { // when metadata is serialized certain elements of the metadata shouldn't be included into snapshot // exclusion of these elements is done by setting MetaData.CONTEXT_MODE_PARAM to MetaData.CONTEXT_MODE_SNAPSHOT snapshotOnlyParams.put(MetaData.CONTEXT_MODE_PARAM, MetaData.CONTEXT_MODE_SNAPSHOT); + // serialize SnapshotInfo using the SNAPSHOT mode + snapshotOnlyParams.put(SnapshotInfo.CONTEXT_MODE_PARAM, SnapshotInfo.CONTEXT_MODE_SNAPSHOT); SNAPSHOT_ONLY_FORMAT_PARAMS = new ToXContent.MapParams(snapshotOnlyParams); } diff --git a/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java b/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java index 2194732445d..8cdbca512e7 100644 --- a/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java +++ b/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java @@ -123,9 +123,9 @@ public class BytesRestResponse extends RestResponse { params = new ToXContent.DelegatingMapParams(Collections.singletonMap(ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE, "false"), channel.request()); } else { if (status.getStatus() < 500) { - SUPPRESSED_ERROR_LOGGER.debug("{} Params: {}", t, channel.request().path(), channel.request().params()); + SUPPRESSED_ERROR_LOGGER.debug("path: {}, params: {}", t, channel.request().rawPath(), channel.request().params()); } else { - SUPPRESSED_ERROR_LOGGER.warn("{} Params: {}", t, channel.request().path(), channel.request().params()); + 
SUPPRESSED_ERROR_LOGGER.warn("path: {}, params: {}", t, channel.request().rawPath(), channel.request().params()); } params = channel.request(); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java index 8a4afd89ac4..a06466137a5 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/reroute/RestClusterRerouteAction.java @@ -25,12 +25,20 @@ import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.allocation.command.AllocationCommandRegistry; +import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.ParseFieldMatcherSupplier; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; @@ -43,12 +51,17 @@ import java.util.EnumSet; /** */ public class RestClusterRerouteAction extends BaseRestHandler { + private static final ObjectParser PARSER = new ObjectParser<>("cluster_reroute"); + static { + PARSER.declareField((p, v, c) -> v.commands(AllocationCommands.fromXContent(p, c.getParseFieldMatcher(), c.registry)), + new ParseField("commands"), ValueType.OBJECT_ARRAY); + PARSER.declareBoolean(ClusterRerouteRequest::dryRun, new ParseField("dry_run")); + } - private final SettingsFilter settingsFilter; - - private static String DEFAULT_METRICS = Strings + private static final String DEFAULT_METRICS = Strings .arrayToCommaDelimitedString(EnumSet.complementOf(EnumSet.of(ClusterState.Metric.METADATA)).toArray()); + private final SettingsFilter settingsFilter; private final AllocationCommandRegistry registry; @Inject @@ -62,15 +75,7 @@ public class RestClusterRerouteAction extends BaseRestHandler { @Override public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) throws Exception { - final ClusterRerouteRequest clusterRerouteRequest = Requests.clusterRerouteRequest(); - clusterRerouteRequest.dryRun(request.paramAsBoolean("dry_run", clusterRerouteRequest.dryRun())); - clusterRerouteRequest.explain(request.paramAsBoolean("explain", clusterRerouteRequest.explain())); - clusterRerouteRequest.timeout(request.paramAsTime("timeout", clusterRerouteRequest.timeout())); - clusterRerouteRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterRerouteRequest.masterNodeTimeout())); - if (request.hasContent()) { - clusterRerouteRequest.source(request.content(), registry, parseFieldMatcher); - } - + ClusterRerouteRequest clusterRerouteRequest = createRequest(request, registry, parseFieldMatcher); 
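handleRequest shrinks to a call to createRequest, and body parsing moves into a PARSER built once in a static block, with one declaration per field. The real ObjectParser in org.elasticsearch.common.xcontent walks an XContentParser; the stand-in below keeps only the declare-then-parse shape, mapping field names to BiConsumer setters over a pre-tokenized body, and is not the actual API.

--------------------------------------------------
import java.util.HashMap;
import java.util.Map;
import java.util.function.BiConsumer;

/** Toy stand-in for ObjectParser: field name -> setter on the target object. */
class DeclarativeParser<T> {
    private final Map<String, BiConsumer<T, String>> fields = new HashMap<>();

    void declare(String name, BiConsumer<T, String> setter) { fields.put(name, setter); }

    /** "Parse" a pre-tokenized body; the real parser walks an XContentParser instead. */
    void parse(Map<String, String> body, T target) {
        body.forEach((name, value) -> {
            BiConsumer<T, String> setter = fields.get(name);
            if (setter == null) throw new IllegalArgumentException("unknown field [" + name + "]");
            setter.accept(target, value);
        });
    }
}

public class RerouteParserDemo {
    static class RerouteRequestSketch {
        boolean dryRun;
        String commands = "";
    }

    // Built once, statically, like the PARSER in RestClusterRerouteAction above.
    private static final DeclarativeParser<RerouteRequestSketch> PARSER = new DeclarativeParser<>();
    static {
        PARSER.declare("dry_run", (r, v) -> r.dryRun = Boolean.parseBoolean(v));
        PARSER.declare("commands", (r, v) -> r.commands = v);
    }

    public static void main(String[] args) {
        RerouteRequestSketch req = new RerouteRequestSketch();
        Map<String, String> body = new HashMap<>();
        body.put("dry_run", "true");
        PARSER.parse(body, req);
        System.out.println("dryRun=" + req.dryRun);
    }
}
--------------------------------------------------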
client.admin().cluster().reroute(clusterRerouteRequest, new AcknowledgedRestListener(channel) { @Override protected void addCustomFields(XContentBuilder builder, ClusterRerouteResponse response) throws IOException { @@ -89,4 +94,35 @@ public class RestClusterRerouteAction extends BaseRestHandler { } }); } + + public static ClusterRerouteRequest createRequest(RestRequest request, AllocationCommandRegistry registry, + ParseFieldMatcher parseFieldMatcher) throws IOException { + ClusterRerouteRequest clusterRerouteRequest = Requests.clusterRerouteRequest(); + clusterRerouteRequest.dryRun(request.paramAsBoolean("dry_run", clusterRerouteRequest.dryRun())); + clusterRerouteRequest.explain(request.paramAsBoolean("explain", clusterRerouteRequest.explain())); + clusterRerouteRequest.timeout(request.paramAsTime("timeout", clusterRerouteRequest.timeout())); + clusterRerouteRequest.setRetryFailed(request.paramAsBoolean("retry_failed", clusterRerouteRequest.isRetryFailed())); + clusterRerouteRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterRerouteRequest.masterNodeTimeout())); + if (request.hasContent()) { + try (XContentParser parser = XContentHelper.createParser(request.content())) { + PARSER.parse(parser, clusterRerouteRequest, new ParseContext(registry, parseFieldMatcher)); + } + } + return clusterRerouteRequest; + } + + private static class ParseContext implements ParseFieldMatcherSupplier { + private final AllocationCommandRegistry registry; + private final ParseFieldMatcher parseFieldMatcher; + + private ParseContext(AllocationCommandRegistry registry, ParseFieldMatcher parseFieldMatcher) { + this.registry = registry; + this.parseFieldMatcher = parseFieldMatcher; + } + + @Override + public ParseFieldMatcher getParseFieldMatcher() { + return parseFieldMatcher; + } + } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java index dbda83709ba..55ed1d8dda4 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/stats/RestIndicesStatsAction.java @@ -78,7 +78,6 @@ public class RestIndicesStatsAction extends BaseRestHandler { indicesStatsRequest.flush(metrics.contains("flush")); indicesStatsRequest.warmer(metrics.contains("warmer")); indicesStatsRequest.queryCache(metrics.contains("query_cache")); - indicesStatsRequest.percolate(metrics.contains("percolator_cache")); indicesStatsRequest.segments(metrics.contains("segments")); indicesStatsRequest.fieldData(metrics.contains("fielddata")); indicesStatsRequest.completion(metrics.contains("completion")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java index 958fa40b54b..ac8cefcc5aa 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java @@ -77,27 +77,41 @@ public class RestIndicesAction extends AbstractCatAction { clusterStateRequest.clear().indices(indices).metaData(true); clusterStateRequest.local(request.paramAsBoolean("local", clusterStateRequest.local())); clusterStateRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterStateRequest.masterNodeTimeout())); + final IndicesOptions strictExpandIndicesOptions = 
IndicesOptions.strictExpand(); + clusterStateRequest.indicesOptions(strictExpandIndicesOptions); client.admin().cluster().state(clusterStateRequest, new RestActionListener(channel) { @Override public void processResponse(final ClusterStateResponse clusterStateResponse) { - ClusterState state = clusterStateResponse.getState(); - final IndicesOptions concreteIndicesOptions = IndicesOptions.fromOptions(false, true, true, true); - final String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, concreteIndicesOptions, indices); - final String[] openIndices = indexNameExpressionResolver.concreteIndexNames(state, IndicesOptions.lenientExpandOpen(), indices); - ClusterHealthRequest clusterHealthRequest = Requests.clusterHealthRequest(openIndices); + final ClusterState state = clusterStateResponse.getState(); + final String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, strictExpandIndicesOptions, indices); + // concreteIndices should contain exactly the indices in state.metaData() that were selected by clusterStateRequest using + // IndicesOptions.strictExpand(). We select the indices again here so that they can be displayed in the resulting table + // in the requesting order. + assert concreteIndices.length == state.metaData().getIndices().size(); + + // Indices that were successfully resolved during the cluster state request might be deleted when the subsequent cluster + // health and indices stats requests execute. We have to distinguish two cases: + // 1) the deleted index was explicitly passed as parameter to the /_cat/indices request. In this case we want the subsequent + // requests to fail. + // 2) the deleted index was resolved as part of a wildcard or _all. In this case, we want the subsequent requests not to + // fail on the deleted index (as we want to ignore wildcards that cannot be resolved). + // This behavior can be ensured by letting the cluster health and indices stats requests re-resolve the index names with the + // same indices options that we used for the initial cluster state request (strictExpand). Unfortunately cluster health + // requests hard-code their indices options and the best we can do is apply strictExpand to the indices stats request. 
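The comment above is the crux of this hunk: the follow-up requests get the original index expressions rather than the resolved concrete names, so a wildcard that matched a since-deleted index degrades gracefully, while an explicitly named index still fails loudly. A toy resolver illustrating that difference (the real logic lives in IndexNameExpressionResolver and IndicesOptions; this is only a sketch of the semantics):

--------------------------------------------------
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class WildcardResolutionDemo {
    /** Resolve expressions against the currently live indices. */
    static List<String> resolve(List<String> live, String... expressions) {
        List<String> out = new ArrayList<>();
        for (String expr : expressions) {
            if (expr.endsWith("*")) {
                String prefix = expr.substring(0, expr.length() - 1);
                // Wildcards that no longer match anything are silently ignored.
                live.stream().filter(i -> i.startsWith(prefix)).forEach(out::add);
            } else if (live.contains(expr)) {
                out.add(expr);
            } else {
                // An explicitly named index must exist, or the request fails.
                throw new IllegalStateException("no such index [" + expr + "]");
            }
        }
        return out;
    }

    public static void main(String[] args) {
        List<String> live = new ArrayList<>(Arrays.asList("logs-1", "logs-2"));
        System.out.println(resolve(live, "logs-*")); // [logs-1, logs-2]
        live.remove("logs-2");                        // index deleted between requests
        // Re-resolving the *expression* quietly adapts; re-using the stale concrete
        // name "logs-2" would have failed the whole request.
        System.out.println(resolve(live, "logs-*")); // [logs-1]
    }
}
--------------------------------------------------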
+ ClusterHealthRequest clusterHealthRequest = Requests.clusterHealthRequest(indices); clusterHealthRequest.local(request.paramAsBoolean("local", clusterHealthRequest.local())); client.admin().cluster().health(clusterHealthRequest, new RestActionListener(channel) { @Override public void processResponse(final ClusterHealthResponse clusterHealthResponse) { IndicesStatsRequest indicesStatsRequest = new IndicesStatsRequest(); - indicesStatsRequest.indices(concreteIndices); - indicesStatsRequest.indicesOptions(concreteIndicesOptions); + indicesStatsRequest.indices(indices); + indicesStatsRequest.indicesOptions(strictExpandIndicesOptions); indicesStatsRequest.all(); client.admin().indices().stats(indicesStatsRequest, new RestResponseListener(channel) { @Override public RestResponse buildResponse(IndicesStatsResponse indicesStatsResponse) throws Exception { - Table tab = buildTable(request, concreteIndices, clusterHealthResponse, indicesStatsResponse, clusterStateResponse.getState().metaData()); + Table tab = buildTable(request, concreteIndices, clusterHealthResponse, indicesStatsResponse, state.metaData()); return RestTable.buildResponse(tab, channel); } }); @@ -222,9 +236,6 @@ public class RestIndicesAction extends AbstractCatAction { table.addCell("merges.total_time", "sibling:pri;alias:mtt,mergesTotalTime;default:false;text-align:right;desc:time spent in merges"); table.addCell("pri.merges.total_time", "default:false;text-align:right;desc:time spent in merges"); - table.addCell("percolate.queries", "sibling:pri;alias:pq,percolateQueries;default:false;text-align:right;desc:number of registered percolation queries"); - table.addCell("pri.percolate.queries", "default:false;text-align:right;desc:number of registered percolation queries"); - table.addCell("refresh.total", "sibling:pri;alias:rto,refreshTotal;default:false;text-align:right;desc:total refreshes"); table.addCell("pri.refresh.total", "default:false;text-align:right;desc:total refreshes"); @@ -424,9 +435,6 @@ public class RestIndicesAction extends AbstractCatAction { table.addCell(indexStats == null ? null : indexStats.getTotal().getMerge().getTotalTime()); table.addCell(indexStats == null ? null : indexStats.getPrimaries().getMerge().getTotalTime()); - table.addCell(indexStats == null ? null : indexStats.getTotal().getPercolatorCache().getNumQueries()); - table.addCell(indexStats == null ? null : indexStats.getPrimaries().getPercolatorCache().getNumQueries()); - table.addCell(indexStats == null ? null : indexStats.getTotal().getRefresh().getTotal()); table.addCell(indexStats == null ? 
null : indexStats.getPrimaries().getRefresh().getTotal()); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java index 15656733baf..f23f0e0a029 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java @@ -45,7 +45,6 @@ import org.elasticsearch.index.fielddata.FieldDataStats; import org.elasticsearch.index.flush.FlushStats; import org.elasticsearch.index.get.GetStats; import org.elasticsearch.index.merge.MergeStats; -import org.elasticsearch.index.percolator.PercolatorQueryCacheStats; import org.elasticsearch.index.refresh.RefreshStats; import org.elasticsearch.index.search.stats.SearchStats; import org.elasticsearch.index.shard.IndexingStats; @@ -185,8 +184,6 @@ public class RestNodesAction extends AbstractCatAction { table.addCell("merges.total_size", "alias:mts,mergesTotalSize;default:false;text-align:right;desc:size merged"); table.addCell("merges.total_time", "alias:mtt,mergesTotalTime;default:false;text-align:right;desc:time spent in merges"); - table.addCell("percolate.queries", "alias:pq,percolateQueries;default:false;text-align:right;desc:number of registered percolation queries"); - table.addCell("refresh.total", "alias:rto,refreshTotal;default:false;text-align:right;desc:total refreshes"); table.addCell("refresh.time", "alias:rti,refreshTime;default:false;text-align:right;desc:time spent in refreshes"); @@ -338,9 +335,6 @@ public class RestNodesAction extends AbstractCatAction { table.addCell(mergeStats == null ? null : mergeStats.getTotalSize()); table.addCell(mergeStats == null ? null : mergeStats.getTotalTime()); - PercolatorQueryCacheStats percolatorQueryCacheStats = indicesStats == null ? null : indicesStats.getPercolate(); - table.addCell(percolatorQueryCacheStats == null ? null : percolatorQueryCacheStats.getNumQueries()); - RefreshStats refreshStats = indicesStats == null ? null : indicesStats.getRefresh(); table.addCell(refreshStats == null ? null : refreshStats.getTotal()); table.addCell(refreshStats == null ? null : refreshStats.getTotalTime()); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java index 9954bd3098b..6538b405fce 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java @@ -139,8 +139,6 @@ public class RestShardsAction extends AbstractCatAction { table.addCell("merges.total_size", "alias:mts,mergesTotalSize;default:false;text-align:right;desc:size merged"); table.addCell("merges.total_time", "alias:mtt,mergesTotalTime;default:false;text-align:right;desc:time spent in merges"); - table.addCell("percolate.queries", "alias:pq,percolateQueries;default:false;text-align:right;desc:number of registered percolation queries"); - table.addCell("refresh.total", "alias:rto,refreshTotal;default:false;text-align:right;desc:total refreshes"); table.addCell("refresh.time", "alias:rti,refreshTime;default:false;text-align:right;desc:time spent in refreshes"); @@ -278,8 +276,6 @@ public class RestShardsAction extends AbstractCatAction { table.addCell(commonStats == null ? null : commonStats.getMerge().getTotalSize()); table.addCell(commonStats == null ? null : commonStats.getMerge().getTotalTime()); - table.addCell(commonStats == null ? 
null : commonStats.getPercolatorCache().getNumQueries()); - table.addCell(commonStats == null ? null : commonStats.getRefresh().getTotal()); table.addCell(commonStats == null ? null : commonStats.getRefresh().getTotalTime()); diff --git a/core/src/main/java/org/elasticsearch/script/ScriptEngineRegistry.java b/core/src/main/java/org/elasticsearch/script/ScriptEngineRegistry.java index 226fd7b6c34..31ff9ac0b61 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptEngineRegistry.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptEngineRegistry.java @@ -30,13 +30,13 @@ public class ScriptEngineRegistry { private final Map, String> registeredScriptEngineServices; private final Map> registeredLanguages; - private final Map defaultInlineScriptModes; + private final Map defaultInlineScriptEnableds; public ScriptEngineRegistry(Iterable registrations) { Objects.requireNonNull(registrations); Map, String> registeredScriptEngineServices = new HashMap<>(); Map> registeredLanguages = new HashMap<>(); - Map inlineScriptModes = new HashMap<>(); + Map inlineScriptEnableds = new HashMap<>(); for (ScriptEngineRegistration registration : registrations) { String oldLanguage = registeredScriptEngineServices.putIfAbsent(registration.getScriptEngineService(), registration.getScriptEngineLanguage()); @@ -51,12 +51,12 @@ public class ScriptEngineRegistry { throw new IllegalArgumentException("scripting language [" + language + "] already registered for script engine service [" + scriptEngineServiceClazz.getCanonicalName() + "]"); } - inlineScriptModes.put(language, registration.getDefaultInlineScriptMode()); + inlineScriptEnableds.put(language, registration.getDefaultInlineScriptEnabled()); } this.registeredScriptEngineServices = Collections.unmodifiableMap(registeredScriptEngineServices); this.registeredLanguages = Collections.unmodifiableMap(registeredLanguages); - this.defaultInlineScriptModes = Collections.unmodifiableMap(inlineScriptModes); + this.defaultInlineScriptEnableds = Collections.unmodifiableMap(inlineScriptEnableds); } Iterable> getRegisteredScriptEngineServices() { @@ -72,27 +72,27 @@ public class ScriptEngineRegistry { return registeredLanguages; } - Map getDefaultInlineScriptModes() { - return this.defaultInlineScriptModes; + Map getDefaultInlineScriptEnableds() { + return this.defaultInlineScriptEnableds; } public static class ScriptEngineRegistration { private final Class scriptEngineService; private final String scriptEngineLanguage; - private final ScriptMode defaultInlineScriptMode; + private final boolean defaultInlineScriptEnabled; /** * Register a script engine service with the default of inline scripts disabled */ public ScriptEngineRegistration(Class scriptEngineService, String scriptEngineLanguage) { - this(scriptEngineService, scriptEngineLanguage, ScriptMode.OFF); + this(scriptEngineService, scriptEngineLanguage, false); } /** * Register a script engine service with the given default mode for inline scripts */ public ScriptEngineRegistration(Class scriptEngineService, String scriptEngineLanguage, - ScriptMode defaultInlineScriptMode) { + boolean defaultInlineScriptEnabled) { Objects.requireNonNull(scriptEngineService); if (Strings.hasText(scriptEngineLanguage) == false) { throw new IllegalArgumentException("languages for script engine service [" + @@ -100,7 +100,7 @@ public class ScriptEngineRegistry { } this.scriptEngineService = scriptEngineService; this.scriptEngineLanguage = scriptEngineLanguage; - this.defaultInlineScriptMode = defaultInlineScriptMode; + 
this.defaultInlineScriptEnabled = defaultInlineScriptEnabled; } Class getScriptEngineService() { @@ -111,8 +111,8 @@ public class ScriptEngineRegistry { return scriptEngineLanguage; } - ScriptMode getDefaultInlineScriptMode() { - return defaultInlineScriptMode; + boolean getDefaultInlineScriptEnabled() { + return defaultInlineScriptEnabled; } } diff --git a/core/src/main/java/org/elasticsearch/script/ScriptMode.java b/core/src/main/java/org/elasticsearch/script/ScriptMode.java deleted file mode 100644 index 6508d2f1015..00000000000 --- a/core/src/main/java/org/elasticsearch/script/ScriptMode.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.script; - -import java.util.HashMap; -import java.util.Map; - -/** - * Mode for a specific script, used for script settings. - * Defines whether a certain script or category of scripts can be executed or not. - */ -public enum ScriptMode { - ON("true"), - OFF("false"); - - private final String mode; - - ScriptMode(String mode) { - this.mode = mode; - } - - private static final Map SCRIPT_MODES; - - static { - SCRIPT_MODES = new HashMap<>(); - for (ScriptMode scriptMode : ScriptMode.values()) { - SCRIPT_MODES.put(scriptMode.mode, scriptMode); - } - } - - static ScriptMode parse(String input) { - ScriptMode scriptMode = SCRIPT_MODES.get(input); - if (scriptMode == null) { - throw new IllegalArgumentException("script mode [" + input + "] not supported"); - } - return scriptMode; - } - - public String getMode() { - return mode; - } - - @Override - public String toString() { - return mode; - } -} diff --git a/core/src/main/java/org/elasticsearch/script/ScriptModes.java b/core/src/main/java/org/elasticsearch/script/ScriptModes.java index d07a4073ba8..46ab2a44d21 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptModes.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptModes.java @@ -29,22 +29,22 @@ import java.util.Map; import java.util.TreeMap; /** - * Holds the {@link org.elasticsearch.script.ScriptMode}s for each of the different scripting languages available, - * each script source and each scripted operation. + * Holds the boolean indicating the enabled mode for each of the different scripting languages available, each script source and each + * scripted operation. 
*/ public class ScriptModes { private static final String SCRIPT_SETTINGS_PREFIX = "script"; private static final String ENGINE_SETTINGS_PREFIX = "script.engine"; - final Map scriptModes; + final Map scriptEnabled; ScriptModes(ScriptSettings scriptSettings, Settings settings) { - HashMap scriptModes = new HashMap<>(); - for (Setting scriptModeSetting : scriptSettings.getScriptLanguageSettings()) { + HashMap scriptModes = new HashMap<>(); + for (Setting scriptModeSetting : scriptSettings.getScriptLanguageSettings()) { scriptModes.put(scriptModeSetting.getKey(), scriptModeSetting.get(settings)); } - this.scriptModes = Collections.unmodifiableMap(scriptModes); + this.scriptEnabled = Collections.unmodifiableMap(scriptModes); } /** @@ -54,14 +54,14 @@ public class ScriptModes { * @param lang the language that the script is written in * @param scriptType the type of the script * @param scriptContext the operation that requires the execution of the script - * @return whether scripts are on or off + * @return whether scripts are enabled (true) or disabled (false) */ - public ScriptMode getScriptMode(String lang, ScriptType scriptType, ScriptContext scriptContext) { - //native scripts are always on as they are static by definition + public boolean getScriptEnabled(String lang, ScriptType scriptType, ScriptContext scriptContext) { + //native scripts are always enabled as they are static by definition if (NativeScriptEngineService.NAME.equals(lang)) { - return ScriptMode.ON; + return true; } - ScriptMode scriptMode = scriptModes.get(getKey(lang, scriptType, scriptContext)); + Boolean scriptMode = scriptEnabled.get(getKey(lang, scriptType, scriptContext)); if (scriptMode == null) { throw new IllegalArgumentException("script mode not found for lang [" + lang + "], script_type [" + scriptType + "], operation [" + scriptContext.getKey() + "]"); } @@ -87,10 +87,10 @@ public class ScriptModes { @Override public String toString() { //order settings by key before printing them out, for readability - TreeMap scriptModesTreeMap = new TreeMap<>(); - scriptModesTreeMap.putAll(scriptModes); + TreeMap scriptModesTreeMap = new TreeMap<>(); + scriptModesTreeMap.putAll(scriptEnabled); StringBuilder stringBuilder = new StringBuilder(); - for (Map.Entry stringScriptModeEntry : scriptModesTreeMap.entrySet()) { + for (Map.Entry stringScriptModeEntry : scriptModesTreeMap.entrySet()) { stringBuilder.append(stringScriptModeEntry.getKey()).append(": ").append(stringScriptModeEntry.getValue()).append("\n"); } return stringBuilder.toString(); diff --git a/core/src/main/java/org/elasticsearch/script/ScriptModule.java b/core/src/main/java/org/elasticsearch/script/ScriptModule.java index 0fa4d9ea731..c08c31c9bf2 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.inject.multibindings.MapBinder; import org.elasticsearch.common.inject.multibindings.Multibinder; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.SettingsModule; -import org.elasticsearch.script.ScriptMode; import java.util.ArrayList; import java.util.HashMap; @@ -42,7 +41,7 @@ public class ScriptModule extends AbstractModule { { scriptEngineRegistrations.add(new ScriptEngineRegistry.ScriptEngineRegistration(NativeScriptEngineService.class, - NativeScriptEngineService.NAME, ScriptMode.ON)); + NativeScriptEngineService.NAME, true)); } private final Map> scripts = new HashMap<>(); diff --git 
a/core/src/main/java/org/elasticsearch/script/ScriptService.java b/core/src/main/java/org/elasticsearch/script/ScriptService.java index f77a0aae330..cf0ba1ca941 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java @@ -473,15 +473,7 @@ public class ScriptService extends AbstractComponent implements Closeable { if (scriptContextRegistry.isSupportedContext(scriptContext) == false) { throw new IllegalArgumentException("script context [" + scriptContext.getKey() + "] not supported"); } - ScriptMode mode = scriptModes.getScriptMode(lang, scriptType, scriptContext); - switch (mode) { - case ON: - return true; - case OFF: - return false; - default: - throw new IllegalArgumentException("script mode [" + mode + "] not supported"); - } + return scriptModes.getScriptEnabled(lang, scriptType, scriptContext); } public ScriptStats stats() { @@ -610,14 +602,14 @@ public class ScriptService extends AbstractComponent implements Closeable { */ public enum ScriptType { - INLINE(0, "inline", "inline", ScriptMode.OFF), - STORED(1, "id", "stored", ScriptMode.OFF), - FILE(2, "file", "file", ScriptMode.ON); + INLINE(0, "inline", "inline", false), + STORED(1, "id", "stored", false), + FILE(2, "file", "file", true); private final int val; private final ParseField parseField; private final String scriptType; - private final ScriptMode defaultScriptMode; + private final boolean defaultScriptEnabled; public static ScriptType readFrom(StreamInput in) throws IOException { int scriptTypeVal = in.readVInt(); @@ -638,19 +630,19 @@ public class ScriptService extends AbstractComponent implements Closeable { } } - ScriptType(int val, String name, String scriptType, ScriptMode defaultScriptMode) { + ScriptType(int val, String name, String scriptType, boolean defaultScriptEnabled) { this.val = val; this.parseField = new ParseField(name); this.scriptType = scriptType; - this.defaultScriptMode = defaultScriptMode; + this.defaultScriptEnabled = defaultScriptEnabled; } public ParseField getParseField() { return parseField; } - public ScriptMode getDefaultScriptMode() { - return defaultScriptMode; + public boolean getDefaultScriptEnabled() { + return defaultScriptEnabled; } public String getScriptType() { diff --git a/core/src/main/java/org/elasticsearch/script/ScriptSettings.java b/core/src/main/java/org/elasticsearch/script/ScriptSettings.java index 433912e47e5..71013f28a06 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptSettings.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptSettings.java @@ -37,29 +37,28 @@ public class ScriptSettings { public final static String DEFAULT_LANG = "groovy"; - private final static Map> SCRIPT_TYPE_SETTING_MAP; + private final static Map> SCRIPT_TYPE_SETTING_MAP; static { - Map> scriptTypeSettingMap = new HashMap<>(); + Map> scriptTypeSettingMap = new HashMap<>(); for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) { - scriptTypeSettingMap.put(scriptType, new Setting<>( + scriptTypeSettingMap.put(scriptType, Setting.boolSetting( ScriptModes.sourceKey(scriptType), - scriptType.getDefaultScriptMode().getMode(), - ScriptMode::parse, + scriptType.getDefaultScriptEnabled(), Property.NodeScope)); } SCRIPT_TYPE_SETTING_MAP = Collections.unmodifiableMap(scriptTypeSettingMap); } - private final Map> scriptContextSettingMap; - private final List> scriptLanguageSettings; + private final Map> scriptContextSettingMap; + private final List> scriptLanguageSettings; 
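With the ScriptMode enum gone, canExecuteScript reduces to a Boolean map lookup keyed by language, script type, and context, with native scripts short-circuited to enabled. A self-contained sketch of that lookup follows; the flattened key format is an assumption for brevity (the real keys are assembled by ScriptModes.getKey from the settings prefixes shown above).

--------------------------------------------------
import java.util.HashMap;
import java.util.Map;

public class ScriptEnabledDemo {
    static final String NATIVE = "native";
    static final Map<String, Boolean> SCRIPT_ENABLED = new HashMap<>();
    static {
        // Mirrors keys like "script.engine.groovy.inline.aggs", flattened here.
        SCRIPT_ENABLED.put("groovy.inline.aggs", false);
        SCRIPT_ENABLED.put("groovy.file.aggs", true);
    }

    static boolean isEnabled(String lang, String type, String context) {
        if (NATIVE.equals(lang)) {
            return true; // native scripts are static code, always allowed
        }
        Boolean enabled = SCRIPT_ENABLED.get(lang + "." + type + "." + context);
        if (enabled == null) {
            // Unknown combinations are an error, matching ScriptModes' behavior.
            throw new IllegalArgumentException("script mode not found for lang [" + lang + "]");
        }
        return enabled;
    }

    public static void main(String[] args) {
        System.out.println(isEnabled("groovy", "file", "aggs"));   // true
        System.out.println(isEnabled("native", "inline", "aggs")); // true
    }
}
--------------------------------------------------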
private final Setting<String> defaultScriptLanguageSetting; public ScriptSettings(ScriptEngineRegistry scriptEngineRegistry, ScriptContextRegistry scriptContextRegistry) { - Map<ScriptContext, Setting<ScriptMode>> scriptContextSettingMap = contextSettings(scriptContextRegistry); + Map<ScriptContext, Setting<Boolean>> scriptContextSettingMap = contextSettings(scriptContextRegistry); this.scriptContextSettingMap = Collections.unmodifiableMap(scriptContextSettingMap); - List<Setting<ScriptMode>> scriptLanguageSettings = languageSettings(SCRIPT_TYPE_SETTING_MAP, scriptContextSettingMap, scriptEngineRegistry, scriptContextRegistry); + List<Setting<Boolean>> scriptLanguageSettings = languageSettings(SCRIPT_TYPE_SETTING_MAP, scriptContextSettingMap, scriptEngineRegistry, scriptContextRegistry); this.scriptLanguageSettings = Collections.unmodifiableList(scriptLanguageSettings); this.defaultScriptLanguageSetting = new Setting<>("script.default_lang", DEFAULT_LANG, setting -> { @@ -70,24 +69,20 @@ public class ScriptSettings { }, Property.NodeScope); } - private static Map<ScriptContext, Setting<ScriptMode>> contextSettings(ScriptContextRegistry scriptContextRegistry) { - Map<ScriptContext, Setting<ScriptMode>> scriptContextSettingMap = new HashMap<>(); + private static Map<ScriptContext, Setting<Boolean>> contextSettings(ScriptContextRegistry scriptContextRegistry) { + Map<ScriptContext, Setting<Boolean>> scriptContextSettingMap = new HashMap<>(); for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) { - scriptContextSettingMap.put(scriptContext, new Setting<>( - ScriptModes.operationKey(scriptContext), - ScriptMode.OFF.getMode(), - ScriptMode::parse, - Property.NodeScope - )); + scriptContextSettingMap.put(scriptContext, + Setting.boolSetting(ScriptModes.operationKey(scriptContext), false, Property.NodeScope)); } return scriptContextSettingMap; } - private static List<Setting<ScriptMode>> languageSettings(Map<ScriptService.ScriptType, Setting<ScriptMode>> scriptTypeSettingMap, - Map<ScriptContext, Setting<ScriptMode>> scriptContextSettingMap, + private static List<Setting<Boolean>> languageSettings(Map<ScriptService.ScriptType, Setting<Boolean>> scriptTypeSettingMap, + Map<ScriptContext, Setting<Boolean>> scriptContextSettingMap, ScriptEngineRegistry scriptEngineRegistry, ScriptContextRegistry scriptContextRegistry) { - final List<Setting<ScriptMode>> scriptModeSettings = new ArrayList<>(); + final List<Setting<Boolean>> scriptModeSettings = new ArrayList<>(); for (final Class<? extends ScriptEngineService> scriptEngineService : scriptEngineRegistry.getRegisteredScriptEngineServices()) { if (scriptEngineService == NativeScriptEngineService.class) { @@ -97,17 +92,17 @@ public class ScriptSettings { final String language = scriptEngineRegistry.getLanguage(scriptEngineService); for (final ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) { // Top level, like "script.engine.groovy.inline" - final ScriptMode defaultNonFileScriptMode = scriptEngineRegistry.getDefaultInlineScriptModes().get(language); - ScriptMode defaultLangAndType = defaultNonFileScriptMode; + final boolean defaultNonFileScriptMode = scriptEngineRegistry.getDefaultInlineScriptEnableds().get(language); + boolean defaultLangAndType = defaultNonFileScriptMode; // Files are treated differently because they are never default-deny if (ScriptService.ScriptType.FILE == scriptType) { - defaultLangAndType = ScriptService.ScriptType.FILE.getDefaultScriptMode(); + defaultLangAndType = ScriptService.ScriptType.FILE.getDefaultScriptEnabled(); } - final ScriptMode defaultIfNothingSet = defaultLangAndType; + final boolean defaultIfNothingSet = defaultLangAndType; // Setting for something like "script.engine.groovy.inline" - final Setting<ScriptMode> langAndTypeSetting = new Setting<>(ScriptModes.getGlobalKey(language, scriptType), - defaultLangAndType.toString(), ScriptMode::parse, Property.NodeScope); + final Setting<Boolean> langAndTypeSetting = Setting.boolSetting(ScriptModes.getGlobalKey(language, scriptType), + defaultLangAndType, Property.NodeScope); scriptModeSettings.add(langAndTypeSetting); for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) { @@ -115,32 +110,31 @@ public class ScriptSettings { // A function that, given a setting, will return what the default should be. Since the fine-grained script settings // read from a bunch of different places this is implemented in this way. Function<Settings, String> defaultSettingFn = settings -> { - final Setting<ScriptMode> globalOpSetting = scriptContextSettingMap.get(scriptContext); - final Setting<ScriptMode> globalTypeSetting = scriptTypeSettingMap.get(scriptType); - final Setting<ScriptMode> langAndTypeAndContextSetting = new Setting<>(langAndTypeAndContextName, - defaultIfNothingSet.toString(), ScriptMode::parse, Property.NodeScope); + final Setting<Boolean> globalOpSetting = scriptContextSettingMap.get(scriptContext); + final Setting<Boolean> globalTypeSetting = scriptTypeSettingMap.get(scriptType); + final Setting<Boolean> langAndTypeAndContextSetting = Setting.boolSetting(langAndTypeAndContextName, + defaultIfNothingSet, Property.NodeScope); // fallback logic for script mode settings if (langAndTypeAndContextSetting.exists(settings)) { // like: "script.engine.groovy.inline.aggs: true" - return langAndTypeAndContextSetting.get(settings).getMode(); + return langAndTypeAndContextSetting.get(settings).toString(); } else if (langAndTypeSetting.exists(settings)) { // like: "script.engine.groovy.inline: true" - return langAndTypeSetting.get(settings).getMode(); + return langAndTypeSetting.get(settings).toString(); } else if (globalOpSetting.exists(settings)) { // like: "script.aggs: true" - return globalOpSetting.get(settings).getMode(); + return globalOpSetting.get(settings).toString(); } else if (globalTypeSetting.exists(settings)) { // like: "script.inline: true" - return globalTypeSetting.get(settings).getMode(); + return globalTypeSetting.get(settings).toString(); } else { // Nothing is set! - return defaultIfNothingSet.getMode(); + return Boolean.toString(defaultIfNothingSet); } }; // The actual setting for finest grained script settings - Setting<ScriptMode> setting = new Setting<>(langAndTypeAndContextName, defaultSettingFn, - ScriptMode::parse, Property.NodeScope); + Setting<Boolean> setting = Setting.boolSetting(langAndTypeAndContextName, defaultSettingFn, Property.NodeScope); scriptModeSettings.add(setting); } } @@ -148,15 +142,15 @@ public class ScriptSettings { return scriptModeSettings; } - public Iterable<Setting<ScriptMode>> getScriptTypeSettings() { + public Iterable<Setting<Boolean>> getScriptTypeSettings() { return Collections.unmodifiableCollection(SCRIPT_TYPE_SETTING_MAP.values()); } - public Iterable<Setting<ScriptMode>> getScriptContextSettings() { + public Iterable<Setting<Boolean>> getScriptContextSettings() { return Collections.unmodifiableCollection(scriptContextSettingMap.values()); } - public Iterable<Setting<ScriptMode>> getScriptLanguageSettings() { + public Iterable<Setting<Boolean>> getScriptLanguageSettings() { return scriptLanguageSettings; } diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index e76597c256c..f601edd50a4 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -32,7 +32,6 @@ import org.elasticsearch.common.lucene.search.function.ScoreFunction; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ParseFieldRegistry; -import org.elasticsearch.index.percolator.PercolatorHighlightSubFetchPhase; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.BoostingQueryBuilder; import org.elasticsearch.index.query.CommonTermsQueryBuilder; @@ -61,7 +60,6 @@ import org.elasticsearch.index.query.MoreLikeThisQueryBuilder; import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.NestedQueryBuilder; import org.elasticsearch.index.query.ParentIdQueryBuilder; -import org.elasticsearch.index.query.PercolateQueryBuilder; import org.elasticsearch.index.query.PrefixQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParser; @@ -469,7 +467,6 @@ public class SearchModule extends AbstractModule { fetchSubPhaseMultibinder.addBinding().to(MatchedQueriesFetchSubPhase.class); fetchSubPhaseMultibinder.addBinding().to(HighlightPhase.class); fetchSubPhaseMultibinder.addBinding().to(ParentFieldSubFetchPhase.class); - fetchSubPhaseMultibinder.addBinding().to(PercolatorHighlightSubFetchPhase.class); for (Class<? extends FetchSubPhase> clazz : fetchSubPhases) { fetchSubPhaseMultibinder.addBinding().to(clazz); } @@ -697,7 +694,7 @@ public class SearchModule extends AbstractModule { registerQuery(ExistsQueryBuilder::new, ExistsQueryBuilder::fromXContent, ExistsQueryBuilder.QUERY_NAME_FIELD); registerQuery(MatchNoneQueryBuilder::new, MatchNoneQueryBuilder::fromXContent, MatchNoneQueryBuilder.QUERY_NAME_FIELD); registerQuery(ParentIdQueryBuilder::new, ParentIdQueryBuilder::fromXContent, ParentIdQueryBuilder.QUERY_NAME_FIELD); - registerQuery(PercolateQueryBuilder::new, PercolateQueryBuilder::fromXContent, PercolateQueryBuilder.QUERY_NAME_FIELD); + if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { registerQuery(GeoShapeQueryBuilder::new, GeoShapeQueryBuilder::fromXContent, GeoShapeQueryBuilder.QUERY_NAME_FIELD); } diff --git
a/core/src/main/java/org/elasticsearch/search/fetch/matchedqueries/MatchedQueriesFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/matchedqueries/MatchedQueriesFetchSubPhase.java index 6adb01ad10c..983e131215d 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/matchedqueries/MatchedQueriesFetchSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/matchedqueries/MatchedQueriesFetchSubPhase.java @@ -18,11 +18,14 @@ */ package org.elasticsearch.search.fetch.matchedqueries; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; +import org.apache.lucene.util.Bits; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.internal.InternalSearchHit; @@ -31,6 +34,8 @@ import org.elasticsearch.search.internal.SearchContext.Lifetime; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -48,58 +53,68 @@ public class MatchedQueriesFetchSubPhase implements FetchSubPhase { @Override public boolean hitsExecutionNeeded(SearchContext context) { - return false; + return true; // we short-circuit in hitsExecute } @Override public void hitsExecute(SearchContext context, InternalSearchHit[] hits) { - } + if (hits.length == 0) { + return; + } + hits = hits.clone(); // don't modify the incoming hits + Arrays.sort(hits, (a, b) -> Integer.compare(a.docId(), b.docId())); + @SuppressWarnings("unchecked") + List<String>[] matchedQueries = new List[hits.length]; + for (int i = 0; i < matchedQueries.length; ++i) { + matchedQueries[i] = new ArrayList<>(); + } - @Override - public boolean hitExecutionNeeded(SearchContext context) { - return !context.parsedQuery().namedFilters().isEmpty() - || (context.parsedPostFilter() != null && !context.parsedPostFilter().namedFilters().isEmpty()); - } - - @Override - public void hitExecute(SearchContext context, HitContext hitContext) { - List<String> matchedQueries = new ArrayList<>(2); + Map<String, Query> namedQueries = new HashMap<>(context.parsedQuery().namedFilters()); + if (context.parsedPostFilter() != null) { + namedQueries.putAll(context.parsedPostFilter().namedFilters()); + } try { - addMatchedQueries(hitContext, context.parsedQuery().namedFilters(), matchedQueries); - - if (context.parsedPostFilter() != null) { - addMatchedQueries(hitContext, context.parsedPostFilter().namedFilters(), matchedQueries); + for (Map.Entry<String, Query> entry : namedQueries.entrySet()) { + String name = entry.getKey(); + Query query = entry.getValue(); + int readerIndex = -1; + int docBase = -1; + Weight weight = context.searcher().createNormalizedWeight(query, false); + Bits matchingDocs = null; + for (int i = 0; i < hits.length; ++i) { + InternalSearchHit hit = hits[i]; + int hitReaderIndex = ReaderUtil.subIndex(hit.docId(), context.searcher().getIndexReader().leaves()); + if (readerIndex != hitReaderIndex) { + readerIndex = hitReaderIndex; + LeafReaderContext ctx = context.searcher().getIndexReader().leaves().get(readerIndex); + docBase = ctx.docBase; + // scorers can be costly to create, so reuse them across docs of the same segment + Scorer scorer = weight.scorer(ctx); + matchingDocs = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), scorer); + } + if (matchingDocs.get(hit.docId() - docBase)) { + matchedQueries[i].add(name); + } + } + } + for (int i = 0; i < hits.length; ++i) { + hits[i].matchedQueries(matchedQueries[i].toArray(new String[0])); } } catch (IOException e) { throw ExceptionsHelper.convertToElastic(e); } finally { SearchContext.current().clearReleasables(Lifetime.COLLECTION); } - - hitContext.hit().matchedQueries(matchedQueries.toArray(new String[matchedQueries.size()])); } - private void addMatchedQueries(HitContext hitContext, Map<String, Query> namedQueries, List<String> matchedQueries) throws IOException { - for (Map.Entry<String, Query> entry : namedQueries.entrySet()) { - String name = entry.getKey(); - Query filter = entry.getValue(); + @Override + public boolean hitExecutionNeeded(SearchContext context) { + return false; + } - final Weight weight = hitContext.topLevelSearcher().createNormalizedWeight(filter, false); - final Scorer scorer = weight.scorer(hitContext.readerContext()); - if (scorer == null) { - continue; - } - final TwoPhaseIterator twoPhase = scorer.twoPhaseIterator(); - if (twoPhase == null) { - if (scorer.iterator().advance(hitContext.docId()) == hitContext.docId()) { - matchedQueries.add(name); - } - } else { - if (twoPhase.approximation().advance(hitContext.docId()) == hitContext.docId() && twoPhase.matches()) { - matchedQueries.add(name); - } - } - } + @Override + public void hitExecute(SearchContext context, HitContext hitContext) { + // we do everything in hitsExecute } } diff --git a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java index 96319303420..e69e4094f44 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java @@ -48,7 +48,6 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; -import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; @@ -216,7 +215,7 @@ public class DefaultSearchContext extends SearchContext { + "be less than [" + maxWindow + "]. This prevents allocating massive heaps for storing the results to be " + "rescored. This limit can be set by changing the [" + IndexSettings.MAX_RESCORE_WINDOW_SETTING.getKey() + "] index level setting."); - + } } } @@ -495,11 +494,6 @@ public class DefaultSearchContext extends SearchContext { return indexService.fieldData(); } - @Override - public PercolatorQueryCache percolatorQueryCache() { - return indexService.cache().getPercolatorQueryCache(); - } - @Override public long timeoutInMillis() { return timeoutInMillis; diff --git a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java index 8009d0b5fe4..ac283f1f7ef 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java @@ -33,7 +33,6 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.object.ObjectMapper; -import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.IndexShard; @@ -270,11 +269,6 @@ public abstract class FilteredSearchContext extends SearchContext { return in.fieldData(); } - @Override - public PercolatorQueryCache percolatorQueryCache() { - return in.percolatorQueryCache(); - } - @Override public long timeoutInMillis() { return in.timeoutInMillis(); diff --git a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 550a5f76caf..55b66c42a60 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -38,7 +38,6 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.object.ObjectMapper; -import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.IndexShard; @@ -231,8 +230,6 @@ public abstract class SearchContext implements Releasable { public abstract IndexFieldDataService fieldData(); - public abstract PercolatorQueryCache percolatorQueryCache(); - public abstract long timeoutInMillis(); public abstract void timeoutInMillis(long timeoutInMillis); diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java index 871e765cfd0..2b8ea8ace31 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java @@ -44,6 +44,8 @@ import java.util.List; public final class SnapshotInfo implements Comparable<SnapshotInfo>, ToXContent, FromXContentBuilder, Writeable { public static final SnapshotInfo PROTO = new SnapshotInfo("", Collections.emptyList(), 0); + public static final String CONTEXT_MODE_PARAM = "context_mode"; + public static final String CONTEXT_MODE_SNAPSHOT = "SNAPSHOT"; private static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("strictDateOptionalTime"); private static final String SNAPSHOT = 
"snapshot"; private static final String INDICES = "indices"; @@ -294,37 +296,12 @@ public final class SnapshotInfo implements Comparable, ToXContent, @Override public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { - builder.startObject(SNAPSHOT); - builder.field(NAME, name); - builder.field(VERSION_ID, version.id); - builder.startArray(INDICES); - for (String index : indices) { - builder.value(index); + // write snapshot info to repository snapshot blob format + if (CONTEXT_MODE_SNAPSHOT.equals(params.param(CONTEXT_MODE_PARAM))) { + return toXContentSnapshot(builder, params); } - builder.endArray(); - builder.field(STATE, state); - if (reason != null) { - builder.field(REASON, reason); - } - builder.field(START_TIME, startTime); - builder.field(END_TIME, endTime); - builder.field(TOTAL_SHARDS, totalShards); - builder.field(SUCCESSFUL_SHARDS, successfulShards); - builder.startArray(FAILURES); - for (SnapshotShardFailure shardFailure : shardFailures) { - builder.startObject(); - shardFailure.toXContent(builder, params); - builder.endObject(); - } - builder.endArray(); - builder.endObject(); - return builder; - } - /** - * Produces the external X-content that is delivered through the REST layer. - */ - public XContentBuilder toExternalXContent(final XContentBuilder builder, final ToXContent.Params params) throws IOException { + // write snapshot info for the API and any other situations builder.startObject(); builder.field(SNAPSHOT, name); builder.field(VERSION_ID, version.id); @@ -363,6 +340,34 @@ public final class SnapshotInfo implements Comparable, ToXContent, return builder; } + private XContentBuilder toXContentSnapshot(final XContentBuilder builder, final ToXContent.Params params) throws IOException { + builder.startObject(SNAPSHOT); + builder.field(NAME, name); + builder.field(VERSION_ID, version.id); + builder.startArray(INDICES); + for (String index : indices) { + builder.value(index); + } + builder.endArray(); + builder.field(STATE, state); + if (reason != null) { + builder.field(REASON, reason); + } + builder.field(START_TIME, startTime); + builder.field(END_TIME, endTime); + builder.field(TOTAL_SHARDS, totalShards); + builder.field(SUCCESSFUL_SHARDS, successfulShards); + builder.startArray(FAILURES); + for (SnapshotShardFailure shardFailure : shardFailures) { + builder.startObject(); + shardFailure.toXContent(builder, params); + builder.endObject(); + } + builder.endArray(); + builder.endObject(); + return builder; + } + @Override public SnapshotInfo fromXContent(final XContentParser parser, final ParseFieldMatcher matcher) throws IOException { return fromXContent(parser); diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 8e6681893c9..5b52e915195 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -23,6 +23,7 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterChangedEvent; @@ -44,6 +45,7 @@ import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import 
org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.Tuple; @@ -366,14 +368,7 @@ public class SnapshotsService extends AbstractLifecycleComponentemptyList()); - } catch (Throwable t2) { - logger.warn("[{}] failed to close snapshot in repository", snapshot.snapshotId()); - } - userCreateSnapshotListener.onFailure(t); + removeSnapshotFromClusterState(snapshot.snapshotId(), null, t, new CleanupAfterErrorListener(snapshot, true, userCreateSnapshotListener, t)); } @Override @@ -395,17 +390,46 @@ public class SnapshotsService extends AbstractLifecycleComponent { + + private final SnapshotsInProgress.Entry snapshot; + private final boolean snapshotCreated; + private final CreateSnapshotListener userCreateSnapshotListener; + private final Throwable t; + + public CleanupAfterErrorListener(SnapshotsInProgress.Entry snapshot, boolean snapshotCreated, CreateSnapshotListener userCreateSnapshotListener, Throwable t) { + this.snapshot = snapshot; + this.snapshotCreated = snapshotCreated; + this.userCreateSnapshotListener = userCreateSnapshotListener; + this.t = t; + } + + @Override + public void onResponse(SnapshotInfo snapshotInfo) { + cleanupAfterError(); + } + + @Override + public void onFailure(Throwable e) { + cleanupAfterError(); + } + + private void cleanupAfterError() { + if(snapshotCreated) { try { - repositoriesService.repository(snapshot.snapshotId().getRepository()).finalizeSnapshot(snapshot.snapshotId(), snapshot.indices(), snapshot.startTime(), - ExceptionsHelper.detailedMessage(t), 0, Collections.emptyList()); + repositoriesService.repository(snapshot.snapshotId().getRepository()).finalizeSnapshot( + snapshot.snapshotId(), snapshot.indices(), snapshot.startTime(), ExceptionsHelper.detailedMessage(t), 0, Collections.emptyList()); } catch (Throwable t2) { logger.warn("[{}] failed to close snapshot in repository", snapshot.snapshotId()); } } userCreateSnapshotListener.onFailure(t); } + } private SnapshotInfo inProgressSnapshot(SnapshotsInProgress.Entry entry) { @@ -818,6 +842,19 @@ public class SnapshotsService extends AbstractLifecycleComponent listener) { clusterService.submitStateUpdateTask("remove snapshot metadata", new ClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) { @@ -843,6 +880,9 @@ public class SnapshotsService extends AbstractLifecycleComponent implem ReleaseChannelFutureListener listener = new ReleaseChannelFutureListener(bytes); future.addListener(listener); addedReleaseListener = true; - transportServiceAdapter.onRequestSent(node, requestId, action, request, options); + final TransportRequestOptions finalOptions = options; + ChannelFutureListener channelFutureListener = + f -> transportServiceAdapter.onRequestSent(node, requestId, action, request, finalOptions); + future.addListener(channelFutureListener); } finally { if (!addedReleaseListener) { Releasables.close(bStream.bytes()); diff --git a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransportChannel.java b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransportChannel.java index 57893ff1908..91b6bc120ad 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransportChannel.java +++ 
b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransportChannel.java @@ -37,6 +37,7 @@ import org.elasticsearch.transport.support.TransportStatus; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelFuture; +import org.jboss.netty.channel.ChannelFutureListener; import java.io.IOException; import java.util.concurrent.atomic.AtomicBoolean; @@ -114,7 +115,10 @@ public class NettyTransportChannel implements TransportChannel { ReleaseChannelFutureListener listener = new ReleaseChannelFutureListener(bytes); future.addListener(listener); addedReleaseListener = true; - transportServiceAdapter.onResponseSent(requestId, action, response, options); + final TransportResponseOptions finalOptions = options; + ChannelFutureListener onResponseSentListener = + f -> transportServiceAdapter.onResponseSent(requestId, action, response, finalOptions); + future.addListener(onResponseSentListener); } finally { if (!addedReleaseListener && bStream != null) { Releasables.close(bStream.bytes()); @@ -137,8 +141,10 @@ public class NettyTransportChannel implements TransportChannel { BytesReference bytes = stream.bytes(); ChannelBuffer buffer = bytes.toChannelBuffer(); NettyHeader.writeHeader(buffer, requestId, status, version); - channel.write(buffer); - transportServiceAdapter.onResponseSent(requestId, action, error); + ChannelFuture future = channel.write(buffer); + ChannelFutureListener onResponseSentListener = + f -> transportServiceAdapter.onResponseSent(requestId, action, error); + future.addListener(onResponseSentListener); } private void close() { diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy index 74404903e91..32f7b2bf0dd 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -71,7 +71,7 @@ grant { // set by ESTestCase to improve test reproducibility // TODO: set this with gradle or some other way that repros with seed? - permission java.util.PropertyPermission "es.processors.override", "write"; + permission java.util.PropertyPermission "processors.override", "write"; // TODO: these simply trigger a noisy warning if its unable to clear the properties // fix that in randomizedtesting diff --git a/plugins/delete-by-query/build.gradle b/core/src/test/java/org/apache/log4j/Java9HackTests.java similarity index 79% rename from plugins/delete-by-query/build.gradle rename to core/src/test/java/org/apache/log4j/Java9HackTests.java index 2a5d00519e2..e917f1d3060 100644 --- a/plugins/delete-by-query/build.gradle +++ b/core/src/test/java/org/apache/log4j/Java9HackTests.java @@ -17,8 +17,12 @@ * under the License. */ -esplugin { - description 'The Delete By Query plugin allows to delete documents in Elasticsearch with a single query.' 
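For context on the NettyTransport and NettyTransportChannel hunks above: the onRequestSent/onResponseSent notifications now run from a ChannelFutureListener once the channel write completes, instead of firing synchronously when the write is merely queued. A hedged sketch of that pattern, assuming only the Netty 3 API used in the diff (the Adapter interface and sendAndNotify method are illustrative stand-ins for the transport service adapter calls):

------------------------------------
// Sketch only: defer a "sent" notification until the write completes.
// Adapter and sendAndNotify are hypothetical; the Netty 3 types are real.
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;

class WriteNotifySketch {
    interface Adapter {
        void onSent(long requestId);
    }

    static void sendAndNotify(Channel channel, ChannelBuffer buffer,
                              Adapter adapter, long requestId) {
        ChannelFuture future = channel.write(buffer);
        // runs when the I/O operation finishes (success or failure),
        // not when the write is enqueued
        ChannelFutureListener listener = f -> adapter.onSent(requestId);
        future.addListener(listener);
    }
}
------------------------------------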
- classname 'org.elasticsearch.plugin.deletebyquery.DeleteByQueryPlugin' -} +package org.apache.log4j; +import org.elasticsearch.test.ESTestCase; + +public class Java9HackTests extends ESTestCase { + public void testJava9Hack() { + assertNotNull(MDC.mdc.tlm); + } +} diff --git a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 79edf0f5fff..b28758ed941 100644 --- a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -743,7 +743,7 @@ public class ExceptionSerializationTests extends ESTestCase { ids.put(105, org.elasticsearch.cluster.routing.RoutingException.class); ids.put(106, org.elasticsearch.index.shard.IndexShardRecoveryException.class); ids.put(107, org.elasticsearch.repositories.RepositoryMissingException.class); - ids.put(108, org.elasticsearch.index.percolator.PercolatorException.class); + ids.put(108, null); ids.put(109, org.elasticsearch.index.engine.DocumentSourceMissingException.class); ids.put(110, org.elasticsearch.index.engine.FlushNotAllowedEngineException.class); ids.put(111, org.elasticsearch.common.settings.NoClassSettingsException.class); diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java index 628e26bc4cf..9b19a34b2ff 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java @@ -120,6 +120,7 @@ public final class ClusterAllocationExplainIT extends ESIntegTestCase { assertThat(cae.getShard().getIndexName(), equalTo("only-foo")); assertFalse(cae.isPrimary()); assertFalse(cae.isAssigned()); + assertFalse(cae.isStillFetchingShardData()); assertThat(UnassignedInfo.Reason.INDEX_CREATED, equalTo(cae.getUnassignedInfo().getReason())); assertThat("expecting no remaining delay: " + cae.getRemainingDelayMillis(), cae.getRemainingDelayMillis(), equalTo(0L)); diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainTests.java index db130238443..d5cefc6d1f3 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainTests.java @@ -42,6 +42,7 @@ public final class ClusterAllocationExplainTests extends ESSingleNodeTestCase { assertEquals(0, cae.getShard().getId()); assertEquals(false, cae.isPrimary()); assertNull(cae.getAssignedNodeId()); + assertFalse(cae.isStillFetchingShardData()); assertNotNull(cae.getUnassignedInfo()); NodeExplanation explanation = cae.getNodeExplanations().values().iterator().next(); ClusterAllocationExplanation.FinalDecision fd = explanation.getFinalDecision(); @@ -68,6 +69,7 @@ public final class ClusterAllocationExplainTests extends ESSingleNodeTestCase { assertEquals("test", cae.getShard().getIndexName()); assertEquals(0, cae.getShard().getId()); assertEquals(true, cae.isPrimary()); + assertFalse(cae.isStillFetchingShardData()); assertNotNull("shard should have assigned node id", cae.getAssignedNodeId()); assertNull("assigned shard 
should not have unassigned info", cae.getUnassignedInfo()); explanation = cae.getNodeExplanations().values().iterator().next(); diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanationTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanationTests.java index 1f6df76c82d..511c547f233 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanationTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanationTests.java @@ -81,26 +81,6 @@ public final class ClusterAllocationExplanationTests extends ESTestCase { } - private NodeExplanation makeNodeExplanation(boolean primary, boolean isAssigned, boolean hasErr, boolean hasActiveId) { - Float nodeWeight = randomFloat(); - Exception e = hasErr ? new ElasticsearchException("stuff's broke, yo") : null; - IndicesShardStoresResponse.StoreStatus storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "eggplant", - IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, e); - String assignedNodeId; - if (isAssigned) { - assignedNodeId = "node-0"; - } else { - assignedNodeId = "node-9"; - } - Set activeAllocationIds = new HashSet<>(); - if (hasActiveId) { - activeAllocationIds.add("eggplant"); - } - - return TransportClusterAllocationExplainAction.calculateNodeExplanation(primary ? primaryShard : replicaShard, - indexMetaData, node, noDecision, nodeWeight, storeStatus, assignedNodeId, activeAllocationIds); - } - private void assertExplanations(NodeExplanation ne, String finalExplanation, ClusterAllocationExplanation.FinalDecision finalDecision, ClusterAllocationExplanation.StoreCopy storeCopy) { assertEquals(finalExplanation, ne.getFinalExplanation()); @@ -117,71 +97,89 @@ public final class ClusterAllocationExplanationTests extends ESTestCase { ShardRouting primaryStartedShard = ShardRouting.newUnassigned(new ShardId(i, 0), null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_REOPENED, "foo")); assertTrue(primaryStartedShard.allocatedPostIndexCreate(indexMetaData)); + ShardRouting replicaStartedShard = ShardRouting.newUnassigned(new ShardId(i, 0), null, false, + new UnassignedInfo(UnassignedInfo.Reason.INDEX_REOPENED, "foo")); + assertTrue(replicaStartedShard.allocatedPostIndexCreate(indexMetaData)); IndicesShardStoresResponse.StoreStatus storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "eggplant", IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, e); NodeExplanation ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryShard, indexMetaData, node, - yesDecision, nodeWeight, storeStatus, "", activeAllocationIds); + yesDecision, nodeWeight, storeStatus, "", activeAllocationIds, false); assertExplanations(ne, "the copy of the shard cannot be read", ClusterAllocationExplanation.FinalDecision.NO, ClusterAllocationExplanation.StoreCopy.IO_ERROR); ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryShard, indexMetaData, node, yesDecision, nodeWeight, - null, "", activeAllocationIds); + null, "", activeAllocationIds, false); assertExplanations(ne, "the shard can be assigned", ClusterAllocationExplanation.FinalDecision.YES, ClusterAllocationExplanation.StoreCopy.NONE); ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryStartedShard, indexMetaData, node, yesDecision, - nodeWeight, null, "", activeAllocationIds); + nodeWeight, 
null, "", activeAllocationIds, false); assertExplanations(ne, "there is no copy of the shard available", ClusterAllocationExplanation.FinalDecision.NO, ClusterAllocationExplanation.StoreCopy.NONE); ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryShard, indexMetaData, node, noDecision, nodeWeight, - null, "", activeAllocationIds); + null, "", activeAllocationIds, false); assertExplanations(ne, "the shard cannot be assigned because one or more allocation decider returns a 'NO' decision", ClusterAllocationExplanation.FinalDecision.NO, ClusterAllocationExplanation.StoreCopy.NONE); storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "eggplant", IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null); ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryShard, indexMetaData, node, noDecision, nodeWeight, - storeStatus, "", activeAllocationIds); + storeStatus, "", activeAllocationIds, false); assertExplanations(ne, "the shard cannot be assigned because one or more allocation decider returns a 'NO' decision", ClusterAllocationExplanation.FinalDecision.NO, ClusterAllocationExplanation.StoreCopy.AVAILABLE); storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "eggplant", IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, corruptE); ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryShard, indexMetaData, node, yesDecision, nodeWeight, - storeStatus, "", activeAllocationIds); + storeStatus, "", activeAllocationIds, false); assertExplanations(ne, "the copy of the shard is corrupt", ClusterAllocationExplanation.FinalDecision.NO, ClusterAllocationExplanation.StoreCopy.CORRUPT); storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "banana", IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null); ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryShard, indexMetaData, node, yesDecision, nodeWeight, - storeStatus, "", activeAllocationIds); + storeStatus, "", activeAllocationIds, false); assertExplanations(ne, "the shard can be assigned", ClusterAllocationExplanation.FinalDecision.YES, ClusterAllocationExplanation.StoreCopy.STALE); storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "banana", IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null); ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryStartedShard, indexMetaData, node, yesDecision, - nodeWeight, storeStatus, "", activeAllocationIds); + nodeWeight, storeStatus, "", activeAllocationIds, false); assertExplanations(ne, "the copy of the shard is stale, allocation ids do not match", ClusterAllocationExplanation.FinalDecision.NO, ClusterAllocationExplanation.StoreCopy.STALE); storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "eggplant", IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null); ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryShard, indexMetaData, node, yesDecision, nodeWeight, - storeStatus, "node-0", activeAllocationIds); + storeStatus, "node-0", activeAllocationIds, false); assertExplanations(ne, "the shard is already assigned to this node", ClusterAllocationExplanation.FinalDecision.ALREADY_ASSIGNED, ClusterAllocationExplanation.StoreCopy.AVAILABLE); storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "eggplant", IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null); ne = 
TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryShard, indexMetaData, node, yesDecision, nodeWeight, - storeStatus, "", activeAllocationIds); + storeStatus, "", activeAllocationIds, false); assertExplanations(ne, "the shard can be assigned and the node contains a valid copy of the shard data", ClusterAllocationExplanation.FinalDecision.YES, ClusterAllocationExplanation.StoreCopy.AVAILABLE); -} + + storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "eggplant", + IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null); + ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryStartedShard, indexMetaData, node, yesDecision, + nodeWeight, storeStatus, "", activeAllocationIds, true); + assertExplanations(ne, "the shard's state is still being fetched so it cannot be allocated", + ClusterAllocationExplanation.FinalDecision.NO, ClusterAllocationExplanation.StoreCopy.AVAILABLE); + + storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "eggplant", + IndicesShardStoresResponse.StoreStatus.AllocationStatus.REPLICA, null); + ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(replicaStartedShard, indexMetaData, node, noDecision, + nodeWeight, storeStatus, "", activeAllocationIds, true); + assertExplanations(ne, "the shard cannot be assigned because allocation deciders return a NO " + + "decision and the shard's state is still being fetched", + ClusterAllocationExplanation.FinalDecision.NO, ClusterAllocationExplanation.StoreCopy.AVAILABLE); + } public void testDecisionEquality() { Decision.Multi d = new Decision.Multi(); @@ -206,10 +204,10 @@ public final class ClusterAllocationExplanationTests extends ESTestCase { IndicesShardStoresResponse.StoreStatus storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "eggplant", IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null); NodeExplanation ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryShard, indexMetaData, node, - yesDecision, nodeWeight, storeStatus, "", activeAllocationIds); + yesDecision, nodeWeight, storeStatus, "", activeAllocationIds, false); nodeExplanations.put(ne.getNode(), ne); ClusterAllocationExplanation cae = new ClusterAllocationExplanation(shard, true, - "assignedNode", remainingDelay, null, nodeExplanations); + "assignedNode", remainingDelay, null, false, nodeExplanations); BytesStreamOutput out = new BytesStreamOutput(); cae.writeTo(out); StreamInput in = StreamInput.wrap(out.bytes()); @@ -243,21 +241,21 @@ public final class ClusterAllocationExplanationTests extends ESTestCase { IndicesShardStoresResponse.StoreStatus storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "eggplant", IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, new ElasticsearchException("stuff's broke, yo")); NodeExplanation ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryShard, indexMetaData, node, - d, nodeWeight, storeStatus, "node-0", allocationIds); + d, nodeWeight, storeStatus, "node-0", allocationIds, false); Map nodeExplanations = new HashMap<>(1); nodeExplanations.put(ne.getNode(), ne); ClusterAllocationExplanation cae = new ClusterAllocationExplanation(shardId, true, - "assignedNode", remainingDelay, null, nodeExplanations); + "assignedNode", remainingDelay, null, false, nodeExplanations); XContentBuilder builder = XContentFactory.jsonBuilder(); cae.toXContent(builder, ToXContent.EMPTY_PARAMS); 
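The serialization test above uses the standard write-then-read round trip: serialize with writeTo into a BytesStreamOutput, wrap the bytes back into a StreamInput, and read the object again. A minimal sketch of the same pattern, round-tripping a plain long (only BytesStreamOutput and StreamInput.wrap come from the diff; the helper itself is illustrative):

------------------------------------
// Sketch of the round-trip used by the test above; writeVLong/readVLong
// stand in for the explanation's writeTo(out)/readFrom(in).
import java.io.IOException;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;

class RoundTripSketch {
    static long roundTrip(long value) throws IOException {
        BytesStreamOutput out = new BytesStreamOutput();
        out.writeVLong(value);                 // writeVLong expects a non-negative value
        StreamInput in = StreamInput.wrap(out.bytes());
        return in.readVLong();                 // equals value if both sides agree
    }
}
------------------------------------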
assertEquals("{\"shard\":{\"index\":\"foo\",\"index_uuid\":\"uuid\",\"id\":0,\"primary\":true},\"assigned\":true," + - "\"assigned_node_id\":\"assignedNode\",\"nodes\":{\"node-0\":{\"node_name\":\"\",\"node_attribute" + - "s\":{},\"store\":{\"shard_copy\":\"IO_ERROR\",\"store_exception\":\"ElasticsearchException[stuff" + - "'s broke, yo]\"},\"final_decision\":\"ALREADY_ASSIGNED\",\"final_explanation\":\"the shard is al" + - "ready assigned to this node\",\"weight\":1.5,\"decisions\":[{\"decider\":\"no label\",\"decision" + - "\":\"NO\",\"explanation\":\"because I said no\"},{\"decider\":\"yes label\",\"decision\":\"YES\"" + - ",\"explanation\":\"yes please\"},{\"decider\":\"throttle label\",\"decision\":\"THROTTLE\",\"exp" + - "lanation\":\"wait a sec\"}]}}}", + "\"assigned_node_id\":\"assignedNode\",\"shard_state_fetch_pending\":false,\"nodes\":{\"node-0\":" + + "{\"node_name\":\"\",\"node_attributes\":{},\"store\":{\"shard_copy\":\"IO_ERROR\",\"store_except" + + "ion\":\"ElasticsearchException[stuff's broke, yo]\"},\"final_decision\":\"ALREADY_ASSIGNED\",\"f" + + "inal_explanation\":\"the shard is already assigned to this node\",\"weight\":1.5,\"decisions\":[" + + "{\"decider\":\"no label\",\"decision\":\"NO\",\"explanation\":\"because I said no\"},{\"decider" + + "\":\"yes label\",\"decision\":\"YES\",\"explanation\":\"yes please\"},{\"decider\":\"throttle la" + + "bel\",\"decision\":\"THROTTLE\",\"explanation\":\"wait a sec\"}]}}}", builder.string()); } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java index 9af2bb07417..c35a9e91314 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java @@ -50,7 +50,7 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicReference; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; +import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java index ffc19aa0dcd..f04c758ef9a 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java @@ -57,8 +57,8 @@ import java.util.function.Supplier; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.test.ClusterServiceUtils.setState; /** * The test case for unit testing task manager and related transport actions diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java new 
file mode 100644 index 00000000000..06ca3a4f869 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java @@ -0,0 +1,214 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.reroute; + +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.action.support.master.MasterNodeRequest; +import org.elasticsearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.AllocateReplicaAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.AllocateStalePrimaryAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.AllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.AllocationCommandRegistry; +import org.elasticsearch.cluster.routing.allocation.command.CancelAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.admin.cluster.reroute.RestClusterRerouteAction; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.FakeRestRequest; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Supplier; + +import static java.util.Collections.emptyMap; +import static java.util.Collections.unmodifiableList; +import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; + +/** + * Test for serialization and parsing of {@link ClusterRerouteRequest} and its commands. See the superclass for, well, everything. 
+ */ +public class ClusterRerouteRequestTests extends ESTestCase { + private static final int ROUNDS = 30; + private final List<Supplier<AllocationCommand>> RANDOM_COMMAND_GENERATORS = unmodifiableList(Arrays.asList( + () -> new AllocateReplicaAllocationCommand(randomAsciiOfLengthBetween(2, 10), between(0, 1000), + randomAsciiOfLengthBetween(2, 10)), + () -> new AllocateEmptyPrimaryAllocationCommand(randomAsciiOfLengthBetween(2, 10), between(0, 1000), + randomAsciiOfLengthBetween(2, 10), randomBoolean()), + () -> new AllocateStalePrimaryAllocationCommand(randomAsciiOfLengthBetween(2, 10), between(0, 1000), + randomAsciiOfLengthBetween(2, 10), randomBoolean()), + () -> new CancelAllocationCommand(randomAsciiOfLengthBetween(2, 10), between(0, 1000), + randomAsciiOfLengthBetween(2, 10), randomBoolean()), + () -> new MoveAllocationCommand(randomAsciiOfLengthBetween(2, 10), between(0, 1000), + randomAsciiOfLengthBetween(2, 10), randomAsciiOfLengthBetween(2, 10)))); + private final NamedWriteableRegistry namedWriteableRegistry; + private final AllocationCommandRegistry allocationCommandRegistry; + + public ClusterRerouteRequestTests() { + namedWriteableRegistry = new NamedWriteableRegistry(); + allocationCommandRegistry = new NetworkModule(null, null, true, namedWriteableRegistry).getAllocationCommandRegistry(); + } + + private ClusterRerouteRequest randomRequest() { + ClusterRerouteRequest request = new ClusterRerouteRequest(); + int commands = between(0, 10); + for (int i = 0; i < commands; i++) { + request.add(randomFrom(RANDOM_COMMAND_GENERATORS).get()); + } + request.dryRun(randomBoolean()); + request.explain(randomBoolean()); + request.setRetryFailed(randomBoolean()); + return request; + } + + public void testEqualsAndHashCode() { + for (int round = 0; round < ROUNDS; round++) { + ClusterRerouteRequest request = randomRequest(); + assertEquals(request, request); + assertEquals(request.hashCode(), request.hashCode()); + + ClusterRerouteRequest copy = new ClusterRerouteRequest() + .add(request.getCommands().commands().toArray(new AllocationCommand[0])); + copy.dryRun(request.dryRun()).explain(request.explain()).timeout(request.timeout()).setRetryFailed(request.isRetryFailed()); + copy.masterNodeTimeout(request.masterNodeTimeout());
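The equality assertions that follow use a toggle pattern: flip one field on the copy, assert the two requests diverge, flip it back, and assert they match again, hashCode included. A self-contained sketch of the pattern (Req is a hypothetical stand-in for ClusterRerouteRequest):

------------------------------------
// Illustrative only; Req is hypothetical, not the actual request class.
import java.util.Objects;

class Req {
    boolean dryRun;

    @Override
    public boolean equals(Object o) {
        return o instanceof Req && ((Req) o).dryRun == dryRun;
    }

    @Override
    public int hashCode() {
        return Objects.hash(dryRun);
    }

    static void checkToggle(Req request, Req copy) {
        copy.dryRun = !copy.dryRun;    // mutate a single field
        assert !request.equals(copy);  // copies now diverge
        copy.dryRun = !copy.dryRun;    // restore it
        assert request.equals(copy) && request.hashCode() == copy.hashCode();
    }
}
------------------------------------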
+ assertEquals(request, copy); + assertEquals(request.hashCode(), copy.hashCode()); + + // Changing commands makes requests not equal + copy.add(randomFrom(RANDOM_COMMAND_GENERATORS).get()); + assertNotEquals(request, copy); + // Can't check hashCode because we can't be sure that changing commands changes the hashCode. It usually does but might not. + } + } + + public void testSerialization() throws IOException { + for (int round = 0; round < ROUNDS; round++) { + ClusterRerouteRequest request = randomRequest(); + ClusterRerouteRequest copy = roundTripThroughBytes(request); + assertEquals(request, copy); + assertEquals(request.hashCode(), copy.hashCode()); + assertNotSame(request, copy); + } + } + + public void testParsing() throws IOException { + for (int round = 0; round < ROUNDS; round++) { + ClusterRerouteRequest request = randomRequest(); + ClusterRerouteRequest copy = roundTripThroughRestRequest(request); + assertEquals(request, copy); + assertEquals(request.hashCode(), copy.hashCode()); + assertNotSame(request, copy); + } + } + + private ClusterRerouteRequest roundTripThroughBytes(ClusterRerouteRequest original) throws IOException { + try (BytesStreamOutput output = new BytesStreamOutput()) { + original.writeTo(output); + try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + ClusterRerouteRequest copy = new ClusterRerouteRequest(); + copy.readFrom(in); + return copy; + } + } + } + + private ClusterRerouteRequest roundTripThroughRestRequest(ClusterRerouteRequest original) throws IOException { + RestRequest restRequest = toRestRequest(original); + return RestClusterRerouteAction.createRequest(restRequest, allocationCommandRegistry, ParseFieldMatcher.STRICT); + } + + private static RestRequest toRestRequest(ClusterRerouteRequest original) throws IOException { + Map params = new HashMap<>(); + XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + boolean hasBody = false; + if (randomBoolean()) { + builder.prettyPrint(); + } + builder.startObject(); + if (randomBoolean()) { + params.put("dry_run", Boolean.toString(original.dryRun())); + } else { + hasBody = true; + builder.field("dry_run", original.dryRun()); + } + params.put("explain", Boolean.toString(original.explain())); + if (false == original.timeout().equals(AcknowledgedRequest.DEFAULT_ACK_TIMEOUT) || randomBoolean()) { + params.put("timeout", original.timeout().toString()); + } + if (original.isRetryFailed() || randomBoolean()) { + params.put("retry_failed", Boolean.toString(original.isRetryFailed())); + } + if (false == original.masterNodeTimeout().equals(MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT) || randomBoolean()) { + params.put("master_timeout", original.masterNodeTimeout().toString()); + } + if (original.getCommands() != null) { + hasBody = true; + original.getCommands().toXContent(builder, ToXContent.EMPTY_PARAMS); + } + builder.endObject(); + + return new FakeRestRequest(emptyMap(), params, hasBody ? builder.bytes() : null); + } +} diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java new file mode 100644 index 00000000000..c11bf0b9ca6 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java @@ -0,0 +1,181 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.action.admin.cluster.reroute; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.EmptyClusterInfoService; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.allocation.AllocationService; +import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation; +import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; +import org.elasticsearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; +import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.test.ESAllocationTestCase; +import org.elasticsearch.test.gateway.NoopGatewayAllocator; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; + +import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; +import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED; + +public class ClusterRerouteTests extends ESAllocationTestCase { + + public void testSerializeRequest() throws IOException { + ClusterRerouteRequest req = new ClusterRerouteRequest(); + req.setRetryFailed(randomBoolean()); + req.dryRun(randomBoolean()); + req.explain(randomBoolean()); + req.add(new AllocateEmptyPrimaryAllocationCommand("foo", 1, "bar", randomBoolean())); + req.timeout(TimeValue.timeValueMillis(randomIntBetween(0, 100))); + BytesStreamOutput out = new BytesStreamOutput(); + req.writeTo(out); + BytesReference bytes = out.bytes(); + NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); + new NetworkModule(null, Settings.EMPTY, true, namedWriteableRegistry); + StreamInput wrap = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes.toBytes()), + namedWriteableRegistry); + ClusterRerouteRequest deserializedReq = new ClusterRerouteRequest(); + 
deserializedReq.readFrom(wrap); + + assertEquals(req.isRetryFailed(), deserializedReq.isRetryFailed()); + assertEquals(req.dryRun(), deserializedReq.dryRun()); + assertEquals(req.explain(), deserializedReq.explain()); + assertEquals(req.timeout(), deserializedReq.timeout()); + assertEquals(1, deserializedReq.getCommands().commands().size()); // allocation commands have their own tests + assertEquals(req.getCommands().commands().size(), deserializedReq.getCommands().commands().size()); + } + + public void testClusterStateUpdateTask() { + AllocationService allocationService = new AllocationService(Settings.builder().build(), new AllocationDeciders(Settings.EMPTY, + Collections.singleton(new MaxRetryAllocationDecider(Settings.EMPTY))), + NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); + ClusterState clusterState = createInitialClusterState(allocationService); + ClusterRerouteRequest req = new ClusterRerouteRequest(); + req.dryRun(true); + AtomicReference<ClusterRerouteResponse> responseRef = new AtomicReference<>(); + ActionListener<ClusterRerouteResponse> responseActionListener = new ActionListener<ClusterRerouteResponse>() { + @Override + public void onResponse(ClusterRerouteResponse clusterRerouteResponse) { + responseRef.set(clusterRerouteResponse); + } + + @Override + public void onFailure(Throwable e) { + + } + }; + TransportClusterRerouteAction.ClusterRerouteResponseAckedClusterStateUpdateTask task = + new TransportClusterRerouteAction.ClusterRerouteResponseAckedClusterStateUpdateTask(logger, allocationService, req, + responseActionListener); + ClusterState execute = task.execute(clusterState); + assertSame(execute, clusterState); // dry-run + task.onAllNodesAcked(null); + assertNotSame(responseRef.get().getState(), execute); + + req.dryRun(false); // now we allocate + + final int retries = MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY.get(Settings.EMPTY); + // now fail it N-1 times + for (int i = 0; i < retries; i++) { + ClusterState newState = task.execute(clusterState); + assertNotSame(newState, clusterState); // dry-run=false + clusterState = newState; + RoutingTable routingTable = clusterState.routingTable(); + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), i); + List<FailedRerouteAllocation.FailedShard> failedShards = Collections.singletonList( + new FailedRerouteAllocation.FailedShard(routingTable.index("idx").shard(0).shards().get(0), "boom" + i, + new UnsupportedOperationException())); + RoutingAllocation.Result result = allocationService.applyFailedShards(clusterState, failedShards); + assertTrue(result.changed()); + clusterState = ClusterState.builder(clusterState).routingTable(result.routingTable()).build(); + routingTable = clusterState.routingTable(); + assertEquals(routingTable.index("idx").shards().size(), 1); + if (i == retries - 1) { + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), UNASSIGNED); + } else { + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); + } + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), i + 1); + } + + + // without retry_failed we won't allocate that shard + ClusterState newState = task.execute(clusterState); + assertNotSame(newState, clusterState); // dry-run=false + task.onAllNodesAcked(null); + assertSame(responseRef.get().getState(), newState); + 
RoutingTable routingTable = clusterState.routingTable(); + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), UNASSIGNED); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), retries); + + req.setRetryFailed(true); // now we manually retry and get the shard back into initializing + newState = task.execute(clusterState); + assertNotSame(newState, clusterState); // dry-run=false + clusterState = newState; + routingTable = clusterState.routingTable(); + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), retries); + } + + private ClusterState createInitialClusterState(AllocationService service) { + MetaData.Builder metaBuilder = MetaData.builder(); + metaBuilder.put(IndexMetaData.builder("idx").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0)); + MetaData metaData = metaBuilder.build(); + RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); + routingTableBuilder.addAsNew(metaData.index("idx")); + + RoutingTable routingTable = routingTableBuilder.build(); + ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT) + .metaData(metaData).routingTable(routingTable).build(); + clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2"))) + .build(); + RoutingTable prevRoutingTable = routingTable; + routingTable = service.reroute(clusterState, "reroute").routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + + assertEquals(prevRoutingTable.index("idx").shards().size(), 1); + assertEquals(prevRoutingTable.index("idx").shard(0).shards().get(0).state(), UNASSIGNED); + + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); + return clusterState; + } +} diff --git a/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java b/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java index db1d09b3aa7..5131ddebca7 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java @@ -51,7 +51,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.function.LongSupplier; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java index 56e67bb4ad2..30c29f8db5d 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java @@ -19,6 +19,7 @@ package 
org.elasticsearch.action.ingest; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.TestProcessor; @@ -167,7 +168,8 @@ public class SimulateExecutionServiceTests extends ESTestCase { SimulateDocumentBaseResult simulateDocumentBaseResult = (SimulateDocumentBaseResult) actualItemResponse; assertThat(simulateDocumentBaseResult.getIngestDocument(), nullValue()); assertThat(simulateDocumentBaseResult.getFailure(), instanceOf(RuntimeException.class)); - RuntimeException runtimeException = (RuntimeException) simulateDocumentBaseResult.getFailure(); - assertThat(runtimeException.getMessage(), equalTo("processor failed")); + Exception exception = simulateDocumentBaseResult.getFailure(); + assertThat(exception, instanceOf(ElasticsearchException.class)); + assertThat(exception.getMessage(), equalTo("java.lang.IllegalArgumentException: java.lang.RuntimeException: processor failed")); } } diff --git a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java index 23b0df27480..0fac744625f 100644 --- a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java @@ -78,8 +78,8 @@ import java.util.function.Supplier; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.object.HasToString.hasToString; diff --git a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java index ccdb13f710a..b92ba64c2fb 100644 --- a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java @@ -59,8 +59,8 @@ import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; diff --git a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java index a3335d87fd6..a7289124704 100644 --- a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java @@ 
-55,8 +55,8 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReferenceArray; import java.util.function.Supplier; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.mockito.Mockito.mock; public class TransportNodesActionTests extends ESTestCase { diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java index 5253097818e..6175f822b6a 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java @@ -65,8 +65,8 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.state; import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithAssignedPrimariesAndOneReplica; import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithNoShard; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.lessThanOrEqualTo; diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index d88960ea9ac..2e81ec712eb 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -43,7 +43,6 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -86,8 +85,8 @@ import java.util.stream.Collectors; import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.state; import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithActivePrimary; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.equalTo; diff --git 
a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java index 86165461c84..10132af1b1d 100644 --- a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java @@ -60,8 +60,8 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.function.Supplier; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.core.IsEqual.equalTo; public class TransportInstanceSingleOperationActionTests extends ESTestCase { diff --git a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java index 632646146fe..8b8a4d947a9 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java @@ -22,25 +22,15 @@ package org.elasticsearch.bootstrap; import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.cli.ExitCodes; -import org.elasticsearch.cli.MockTerminal; -import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.monitor.jvm.JvmInfo; -import org.elasticsearch.test.ESTestCase; -import org.junit.After; -import org.junit.Before; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.hasEntry; -public class ElasticsearchCliTests extends ESTestCase { +public class ElasticsearchCliTests extends ESElasticsearchCliTestCase { public void testVersion() throws Exception { runTestThatVersionIsMutuallyExclusiveToOtherOptions("-V", "-d"); @@ -96,7 +86,7 @@ public class ElasticsearchCliTests extends ESTestCase { false, output -> assertThat(output, containsString("Positional arguments not allowed, found [foo]")), (foreground, pidFile, esSettings) -> {}, - "-E", "something", "foo", "-E", "somethingelse" + "-E", "foo=bar", "foo", "-E", "baz=qux" ); } @@ -138,26 +128,10 @@ public class ElasticsearchCliTests extends ESTestCase { output -> {}, (foreground, pidFile, esSettings) -> { assertThat(esSettings.size(), equalTo(2)); - assertThat(esSettings, hasEntry("es.foo", "bar")); - assertThat(esSettings, hasEntry("es.baz", "qux")); + assertThat(esSettings, hasEntry("foo", "bar")); + assertThat(esSettings, hasEntry("baz", "qux")); }, - "-Ees.foo=bar", "-E", "es.baz=qux" - ); - } - - public void testElasticsearchSettingPrefix() throws Exception { - runElasticsearchSettingPrefixTest("-E", "foo"); - runElasticsearchSettingPrefixTest("-E", "foo=bar"); - runElasticsearchSettingPrefixTest("-E", "=bar"); - } - - private void runElasticsearchSettingPrefixTest(String... 
args) throws Exception { - runTest( - ExitCodes.USAGE, - false, - output -> assertThat(output, containsString("Elasticsearch settings must be prefixed with [es.] but was [")), - (foreground, pidFile, esSettings) -> {}, - args + "-Efoo=bar", "-E", "baz=qux" ); } @@ -165,9 +139,9 @@ runTest( ExitCodes.USAGE, false, - output -> assertThat(output, containsString("Elasticsearch setting [es.foo] must not be empty")), + output -> assertThat(output, containsString("Setting [foo] must not be empty")), (foreground, pidFile, esSettings) -> {}, - "-E", "es.foo=" + "-E", "foo=" ); } @@ -180,36 +154,4 @@ "--network.host"); } - private interface InitConsumer { - void accept(final boolean foreground, final String pidFile, final Map<String, String> esSettings); - } - - private void runTest( - final int expectedStatus, - final boolean expectedInit, - final Consumer<String> outputConsumer, - final InitConsumer initConsumer, - String... args) throws Exception { - final MockTerminal terminal = new MockTerminal(); - try { - final AtomicBoolean init = new AtomicBoolean(); - final int status = Elasticsearch.main(args, new Elasticsearch() { - @Override - void init(final boolean daemonize, final String pidFile, final Map<String, String> esSettings) { - init.set(true); - initConsumer.accept(!daemonize, pidFile, esSettings); - } - }, terminal); - assertThat(status, equalTo(expectedStatus)); - assertThat(init.get(), equalTo(expectedInit)); - outputConsumer.accept(terminal.getOutput()); - } catch (Throwable t) { - // if an unexpected exception is thrown, we log - // terminal output to aid debugging - logger.info(terminal.getOutput()); - // rethrow so the test fails - throw t; - } - } - } diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java index a20a5247ed6..9cdeef2a7ff 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java @@ -59,7 +59,6 @@ public class TransportClientIT extends ESIntegTestCase { .put("http.enabled", false) .put(Node.NODE_DATA_SETTING.getKey(), false) .put("cluster.name", "foobar") - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) // make sure we get what we set :) .build()); node.start(); try { diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java index 2fcadb51a10..4ec1f66df57 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java @@ -55,7 +55,6 @@ public class TransportClientRetryIT extends ESIntegTestCase { .put("node.name", "transport_client_retry_test") .put(Node.NODE_MODE_SETTING.getKey(), internalCluster().getNodeMode()) .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), internalCluster().getClusterName()) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()); try (TransportClient client = TransportClient.builder().settings(builder.build()).build()) { diff --git a/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java b/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java index 99c46f1fc2e..84410a92c83 100644 
--- a/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.percolate.PercolateSourceBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -40,9 +39,6 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; -import java.util.HashMap; - -import static org.elasticsearch.action.percolate.PercolateSourceBuilder.docBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertExists; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; @@ -101,22 +97,6 @@ public class NoMasterNodeIT extends ESIntegTestCase { ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE ); - PercolateSourceBuilder percolateSource = new PercolateSourceBuilder(); - percolateSource.setDoc(docBuilder().setDoc(new HashMap())); - assertThrows(client().preparePercolate() - .setIndices("test").setDocumentType("type1") - .setSource(percolateSource), - ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE - ); - - percolateSource = new PercolateSourceBuilder(); - percolateSource.setDoc(docBuilder().setDoc(new HashMap())); - assertThrows(client().preparePercolate() - .setIndices("no_index").setDocumentType("type1") - .setSource(percolateSource), - ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE - ); - assertThrows(client().admin().indices().prepareAnalyze("test", "this is a test"), ClusterBlockException.class, RestStatus.SERVICE_UNAVAILABLE diff --git a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java index fabee819a11..261da3e2bb8 100644 --- a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java @@ -56,8 +56,8 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.function.LongConsumer; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.is; diff --git a/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java b/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java index 298175c41ee..93c33a97cdc 100644 --- a/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java @@ -54,7 +54,7 @@ import 
java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.hamcrest.CoreMatchers.allOf; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java index eb62653978f..68027312581 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java @@ -43,8 +43,8 @@ import java.util.concurrent.atomic.AtomicBoolean; import static java.util.Collections.singleton; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; /** diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java index 708f8ca5079..f46224570b0 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java @@ -64,7 +64,8 @@ public class UnassignedInfoTests extends ESAllocationTestCase { UnassignedInfo.Reason.NODE_LEFT, UnassignedInfo.Reason.REROUTE_CANCELLED, UnassignedInfo.Reason.REINITIALIZED, - UnassignedInfo.Reason.REALLOCATED_REPLICA}; + UnassignedInfo.Reason.REALLOCATED_REPLICA, + UnassignedInfo.Reason.PRIMARY_FAILED}; for (int i = 0; i < order.length; i++) { assertThat(order[i].ordinal(), equalTo(i)); } @@ -72,7 +73,10 @@ public class UnassignedInfoTests extends ESAllocationTestCase { } public void testSerialization() throws Exception { - UnassignedInfo meta = new UnassignedInfo(RandomPicks.randomFrom(random(), UnassignedInfo.Reason.values()), randomBoolean() ? randomAsciiOfLength(4) : null); + UnassignedInfo.Reason reason = RandomPicks.randomFrom(random(), UnassignedInfo.Reason.values()); + UnassignedInfo meta = reason == UnassignedInfo.Reason.ALLOCATION_FAILED ? + new UnassignedInfo(reason, randomBoolean() ? randomAsciiOfLength(4) : null, null, randomIntBetween(1, 100), System.nanoTime(), System.currentTimeMillis()): + new UnassignedInfo(reason, randomBoolean() ? 
randomAsciiOfLength(4) : null); BytesStreamOutput out = new BytesStreamOutput(); meta.writeTo(out); out.close(); @@ -82,6 +86,7 @@ assertThat(read.getUnassignedTimeInMillis(), equalTo(meta.getUnassignedTimeInMillis())); assertThat(read.getMessage(), equalTo(meta.getMessage())); assertThat(read.getDetails(), equalTo(meta.getDetails())); + assertThat(read.getNumFailedAllocations(), equalTo(meta.getNumFailedAllocations())); } public void testIndexCreated() { @@ -273,7 +278,10 @@ public void testUnassignedDelayOnlyNodeLeftNonNodeLeftReason() throws Exception { EnumSet<UnassignedInfo.Reason> reasons = EnumSet.allOf(UnassignedInfo.Reason.class); reasons.remove(UnassignedInfo.Reason.NODE_LEFT); - UnassignedInfo unassignedInfo = new UnassignedInfo(RandomPicks.randomFrom(random(), reasons), null); + UnassignedInfo.Reason reason = RandomPicks.randomFrom(random(), reasons); + UnassignedInfo unassignedInfo = reason == UnassignedInfo.Reason.ALLOCATION_FAILED ? + new UnassignedInfo(reason, null, null, 1, System.nanoTime(), System.currentTimeMillis()): + new UnassignedInfo(reason, null); unassignedInfo = unassignedInfo.updateDelay(unassignedInfo.getUnassignedTimeInNanos() + 1, // add 1 tick delay Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "10h").build(), Settings.EMPTY); long delay = unassignedInfo.getLastComputedLeftDelayNanos(); @@ -287,7 +295,7 @@ */ public void testLeftDelayCalculation() throws Exception { final long baseTime = System.nanoTime(); - UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.NODE_LEFT, "test", null, baseTime, System.currentTimeMillis()); + UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.NODE_LEFT, "test", null, 0, baseTime, System.currentTimeMillis()); final long totalDelayNanos = TimeValue.timeValueMillis(10).nanos(); final Settings settings = Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueNanos(totalDelayNanos)).build(); unassignedInfo = unassignedInfo.updateDelay(baseTime, settings, Settings.EMPTY); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java index 7aa8576ece3..b63692e0d2a 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java @@ -94,7 +94,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { } else { toNodeId = "node1"; } - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 0, existingNodeId, toNodeId))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 0, existingNodeId, toNodeId)), false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node(existingNodeId).iterator().next().state(), equalTo(ShardRoutingState.RELOCATING)); @@ -148,7 +148,7 @@ logger.info("--> allocating to non-existent node, should fail"); 
try { - allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(index, shardId.id(), "node42"))); + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(index, shardId.id(), "node42")), false, false); fail("expected IllegalArgumentException when allocating to non-existing node"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("failed to resolve [node42], no matching nodes")); @@ -156,7 +156,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocating to non-data node, should fail"); try { - allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(index, shardId.id(), "node4"))); + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(index, shardId.id(), "node4")), false, false); fail("expected IllegalArgumentException when allocating to non-data node"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("allocation can only be done on data nodes")); @@ -164,7 +164,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocating non-existing shard, should fail"); try { - allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand("test", 1, "node2"))); + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand("test", 1, "node2")), false, false); fail("expected ShardNotFoundException when allocating non-existing shard"); } catch (ShardNotFoundException e) { assertThat(e.getMessage(), containsString("no such shard")); @@ -172,7 +172,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocating non-existing index, should fail"); try { - allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand("test2", 0, "node2"))); + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand("test2", 0, "node2")), false, false); fail("expected ShardNotFoundException when allocating non-existing index"); } catch (IndexNotFoundException e) { assertThat(e.getMessage(), containsString("no such index")); @@ -180,7 +180,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocating empty primary with acceptDataLoss flag set to false"); try { - allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand("test", 0, "node1", false))); + allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand("test", 0, "node1", false)), false, false); fail("expected IllegalArgumentException when allocating empty primary with acceptDataLoss flag set to false"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("allocating an empty primary for " + shardId + " can result in data loss. 
Please confirm by setting the accept_data_loss parameter to true")); @@ -188,14 +188,14 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocating stale primary with acceptDataLoss flag set to false"); try { - allocation.reroute(clusterState, new AllocationCommands(new AllocateStalePrimaryAllocationCommand(index, shardId.id(), "node1", false))); + allocation.reroute(clusterState, new AllocationCommands(new AllocateStalePrimaryAllocationCommand(index, shardId.id(), "node1", false)), false, false); fail("expected IllegalArgumentException when allocating stale primary with acceptDataLoss flag set to false"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("allocating an empty primary for " + shardId + " can result in data loss. Please confirm by setting the accept_data_loss parameter to true")); } logger.info("--> allocating empty primary with acceptDataLoss flag set to true"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand("test", 0, "node1", true))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand("test", 0, "node1", true)), false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -211,13 +211,13 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocate the replica shard on the primary shard node, should fail"); try { - allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node1"))); + allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node1")), false, false); fail("expected IllegalArgumentException when allocating replica shard on the primary shard node"); } catch (IllegalArgumentException e) { } logger.info("--> allocate the replica shard on on the second node"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2"))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2")), false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -236,7 +236,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> verify that we fail when there are no unassigned shards"); try { - allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand("test", 0, "node3"))); + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand("test", 0, "node3")), false, false); fail("expected IllegalArgumentException when allocating shard while no unassigned shard available"); } catch (IllegalArgumentException e) { } @@ -268,7 +268,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(0)); logger.info("--> allocating empty primary shard with accept_data_loss flag set to true"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new 
AllocateEmptyPrimaryAllocationCommand("test", 0, "node1", true))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand("test", 0, "node1", true)), false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -277,7 +277,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> cancel primary allocation, make sure it fails..."); try { - allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", false))); + allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", false)), false, false); fail(); } catch (IllegalArgumentException e) { } @@ -291,13 +291,13 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> cancel primary allocation, make sure it fails..."); try { - allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", false))); + allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", false)), false, false); fail(); } catch (IllegalArgumentException e) { } logger.info("--> allocate the replica shard on on the second node"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2"))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2")), false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -306,7 +306,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(INITIALIZING).size(), equalTo(1)); logger.info("--> cancel the relocation allocation"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node2", false))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node2", false)), false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -315,7 +315,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node3").size(), equalTo(0)); logger.info("--> allocate the replica shard on on the second node"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2"))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2")), false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -325,7 +325,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { 
logger.info("--> cancel the primary being replicated, make sure it fails"); try { - allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", false))); + allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", false)), false, false); fail(); } catch (IllegalArgumentException e) { } @@ -339,7 +339,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(STARTED).size(), equalTo(1)); logger.info("--> cancel allocation of the replica shard"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node2", false))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node2", false)), false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -348,7 +348,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node3").size(), equalTo(0)); logger.info("--> allocate the replica shard on on the second node"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2"))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand("test", 0, "node2")), false, false); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(rerouteResult.changed(), equalTo(true)); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -364,7 +364,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node2").shardsWithState(STARTED).size(), equalTo(1)); logger.info("--> move the replica shard"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 0, "node2", "node3"))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 0, "node2", "node3")), false, false); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(STARTED).size(), equalTo(1)); @@ -374,7 +374,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node3").shardsWithState(INITIALIZING).size(), equalTo(1)); logger.info("--> cancel the move of the replica shard"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node3", false))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node3", false)), false, false); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); assertThat(clusterState.getRoutingNodes().node("node1").shardsWithState(STARTED).size(), equalTo(1)); @@ -383,7 +383,7 @@ public class AllocationCommandsTests extends 
ESAllocationTestCase { logger.info("--> cancel the primary allocation (with allow_primary set to true)"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", true))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new CancelAllocationCommand("test", 0, "node1", true)), false, false); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(rerouteResult.changed(), equalTo(true)); logger.error(clusterState.prettyPrint()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java index ee993bf3ebd..805ab0321ba 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java @@ -868,7 +868,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase { } commands.add(new MoveAllocationCommand("test", 0, primaryNode, "A-4")); - routingTable = strategy.reroute(clusterState, commands).routingTable(); + routingTable = strategy.reroute(clusterState, commands, false, false).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); assertThat(clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(0)); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java index 11b78d2ae6a..b14aeca890e 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java @@ -149,8 +149,8 @@ public class DeadNodesAllocationTests extends ESAllocationTestCase { logger.info("--> moving primary shard to node3"); rerouteResult = allocation.reroute(clusterState, new AllocationCommands( - new MoveAllocationCommand("test", 0, clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")) - ); + new MoveAllocationCommand("test", 0, clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")), + false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node(origPrimaryNodeId).iterator().next().state(), equalTo(RELOCATING)); @@ -223,8 +223,8 @@ public class DeadNodesAllocationTests extends ESAllocationTestCase { logger.info("--> moving primary shard to node3"); rerouteResult = allocation.reroute(clusterState, new AllocationCommands( - new MoveAllocationCommand("test",0 , clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")) - ); + new MoveAllocationCommand("test",0 , clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")), + false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node(origPrimaryNodeId).iterator().next().state(), equalTo(RELOCATING)); diff --git 
a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java index bfa27a36d8b..29644f07944 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java @@ -149,7 +149,7 @@ public class ExpectedShardSizeAllocationTests extends ESAllocationTestCase { } else { toNodeId = "node1"; } - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 0, existingNodeId, toNodeId))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 0, existingNodeId, toNodeId)), false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertEquals(clusterState.getRoutingNodes().node(existingNodeId).iterator().next().state(), ShardRoutingState.RELOCATING); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java index 58e2397b043..e859c5811c3 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java @@ -109,8 +109,8 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { logger.info("--> moving primary shard to node3"); rerouteResult = allocation.reroute(clusterState, new AllocationCommands( - new MoveAllocationCommand("test", 0, clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")) - ); + new MoveAllocationCommand("test", 0, clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")), + false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node(origPrimaryNodeId).iterator().next().state(), equalTo(RELOCATING)); @@ -125,8 +125,8 @@ public class FailedShardsRoutingTests extends ESAllocationTestCase { logger.info("--> moving primary shard to node3"); rerouteResult = allocation.reroute(clusterState, new AllocationCommands( - new MoveAllocationCommand("test", 0, clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")) - ); + new MoveAllocationCommand("test", 0, clusterState.routingTable().index("test").shard(0).primaryShard().currentNodeId(), "node3")), + false, false); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node(origPrimaryNodeId).iterator().next().state(), equalTo(RELOCATING)); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/MaxRetryAllocationDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/MaxRetryAllocationDeciderTests.java new file mode 100644 index 00000000000..f76851cfef9 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/MaxRetryAllocationDeciderTests.java @@ -0,0 +1,210 @@ +/* + * 
Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cluster.routing.allocation; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.EmptyClusterInfoService; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; +import org.elasticsearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.AllocateReplicaAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; +import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; +import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESAllocationTestCase; +import org.elasticsearch.test.gateway.NoopGatewayAllocator; + +import java.util.Collections; +import java.util.List; + +import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; +import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; +import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED; + +public class MaxRetryAllocationDeciderTests extends ESAllocationTestCase { + + private AllocationService strategy; + + @Override + public void setUp() throws Exception { + super.setUp(); + strategy = new AllocationService(Settings.builder().build(), new AllocationDeciders(Settings.EMPTY, + Collections.singleton(new MaxRetryAllocationDecider(Settings.EMPTY))), + NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); + } + + private ClusterState createInitialClusterState() { + MetaData.Builder metaBuilder = MetaData.builder(); + metaBuilder.put(IndexMetaData.builder("idx").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(0)); + MetaData metaData = metaBuilder.build(); + RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); + routingTableBuilder.addAsNew(metaData.index("idx")); + + RoutingTable routingTable = routingTableBuilder.build(); + ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT) + .metaData(metaData).routingTable(routingTable).build(); + clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2"))) + .build(); + RoutingTable prevRoutingTable = routingTable; + routingTable = strategy.reroute(clusterState, 
"reroute", false).routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + + assertEquals(prevRoutingTable.index("idx").shards().size(), 1); + assertEquals(prevRoutingTable.index("idx").shard(0).shards().get(0).state(), UNASSIGNED); + + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); + return clusterState; + } + + public void testSingleRetryOnIgnore() { + ClusterState clusterState = createInitialClusterState(); + RoutingTable routingTable = clusterState.routingTable(); + final int retries = MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY.get(Settings.EMPTY); + // now fail it N-1 times + for (int i = 0; i < retries-1; i++) { + List failedShards = Collections.singletonList( + new FailedRerouteAllocation.FailedShard(routingTable.index("idx").shard(0).shards().get(0), "boom" + i, + new UnsupportedOperationException())); + RoutingAllocation.Result result = strategy.applyFailedShards(clusterState, failedShards); + assertTrue(result.changed()); + routingTable = result.routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), i+1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getMessage(), "boom" + i); + } + // now we go and check that we are actually stick to unassigned on the next failure + List failedShards = Collections.singletonList( + new FailedRerouteAllocation.FailedShard(routingTable.index("idx").shard(0).shards().get(0), "boom", + new UnsupportedOperationException())); + RoutingAllocation.Result result = strategy.applyFailedShards(clusterState, failedShards); + assertTrue(result.changed()); + routingTable = result.routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), retries); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), UNASSIGNED); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getMessage(), "boom"); + + result = strategy.reroute(clusterState, new AllocationCommands(), false, true); // manual reroute should retry once + assertTrue(result.changed()); + routingTable = result.routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), retries); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getMessage(), "boom"); + + // now we go and check that we are actually stick to unassigned on the next failure ie. 
no retry + failedShards = Collections.singletonList( + new FailedRerouteAllocation.FailedShard(routingTable.index("idx").shard(0).shards().get(0), "boom", + new UnsupportedOperationException())); + result = strategy.applyFailedShards(clusterState, failedShards); + assertTrue(result.changed()); + routingTable = result.routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), retries+1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), UNASSIGNED); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getMessage(), "boom"); + + } + + public void testFailedAllocation() { + ClusterState clusterState = createInitialClusterState(); + RoutingTable routingTable = clusterState.routingTable(); + final int retries = MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY.get(Settings.EMPTY); + // now fail it N-1 times + for (int i = 0; i < retries-1; i++) { + List<FailedRerouteAllocation.FailedShard> failedShards = Collections.singletonList( + new FailedRerouteAllocation.FailedShard(routingTable.index("idx").shard(0).shards().get(0), "boom" + i, + new UnsupportedOperationException())); + RoutingAllocation.Result result = strategy.applyFailedShards(clusterState, failedShards); + assertTrue(result.changed()); + routingTable = result.routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), i+1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getMessage(), "boom" + i); + } + // now we go and check that the shard actually stays unassigned on the next failure + { + List<FailedRerouteAllocation.FailedShard> failedShards = Collections.singletonList( + new FailedRerouteAllocation.FailedShard(routingTable.index("idx").shard(0).shards().get(0), "boom", + new UnsupportedOperationException())); + RoutingAllocation.Result result = strategy.applyFailedShards(clusterState, failedShards); + assertTrue(result.changed()); + routingTable = result.routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), retries); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), UNASSIGNED); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getMessage(), "boom"); + } + + // change the settings and ensure we can do another round of allocation for that index. 
+ clusterState = ClusterState.builder(clusterState).routingTable(routingTable) + .metaData(MetaData.builder(clusterState.metaData()) + .put(IndexMetaData.builder(clusterState.metaData().index("idx")).settings( + Settings.builder().put(clusterState.metaData().index("idx").getSettings()).put("index.allocation.max_retries", + retries+1).build() + ).build(), true).build()).build(); + RoutingAllocation.Result result = strategy.reroute(clusterState, "settings changed", false); + assertTrue(result.changed()); + routingTable = result.routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + // good, we are initializing again and the failure information is maintained + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), retries); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getMessage(), "boom"); + + // now we start the shard + routingTable = strategy.applyStartedShards(clusterState, Collections.singletonList(routingTable.index("idx") + .shard(0).shards().get(0))).routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + + // all counters have been reset to 0, i.e. no unassigned info + assertEquals(routingTable.index("idx").shards().size(), 1); + assertNull(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo()); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), STARTED); + + // now fail again and see if it has a new counter + List failedShards = Collections.singletonList( + new FailedRerouteAllocation.FailedShard(routingTable.index("idx").shard(0).shards().get(0), "ZOOOMG", + new UnsupportedOperationException())); + result = strategy.applyFailedShards(clusterState, failedShards); + assertTrue(result.changed()); + routingTable = result.routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + assertEquals(routingTable.index("idx").shards().size(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getNumFailedAllocations(), 1); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); + assertEquals(routingTable.index("idx").shard(0).shards().get(0).unassignedInfo().getMessage(), "ZOOOMG"); + } +} diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java index 97a3003ab2f..d0fc64b4b6b 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java @@ -337,7 +337,7 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { AllocationService strategy = new MockAllocationService(Settings.EMPTY, allocationDeciders, NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); - RoutingAllocation.Result result = strategy.reroute(state, new AllocationCommands(), true); + RoutingAllocation.Result result = strategy.reroute(state, new AllocationCommands(), true, false); // the two indices must stay as is, the replicas cannot move to oldNode2
because versions don't match state = ClusterState.builder(state).routingResult(result).build(); assertThat(result.routingTable().index(shard2.getIndex()).shardsWithState(ShardRoutingState.RELOCATING).size(), equalTo(0)); @@ -369,7 +369,7 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { AllocationService strategy = new MockAllocationService(Settings.EMPTY, allocationDeciders, NoopGatewayAllocator.INSTANCE, new BalancedShardsAllocator(Settings.EMPTY), EmptyClusterInfoService.INSTANCE); - RoutingAllocation.Result result = strategy.reroute(state, new AllocationCommands(), true); + RoutingAllocation.Result result = strategy.reroute(state, new AllocationCommands(), true, false); // Make sure that primary shards are only allocated on the new node for (int i = 0; i < numberOfShards; i++) { diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java index 28d916e20c1..61a72bc352a 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java @@ -283,7 +283,7 @@ public class ThrottlingAllocationTests extends ESAllocationTestCase { assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node2"), 0); assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0); - RoutingAllocation.Result reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", clusterState.getRoutingNodes().node("node1").iterator().next().shardId().id(), "node1", "node2"))); + RoutingAllocation.Result reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", clusterState.getRoutingNodes().node("node1").iterator().next().shardId().id(), "node1", "node2")), false, false); assertEquals(reroute.explanations().explanations().size(), 1); assertEquals(reroute.explanations().explanations().get(0).decisions().type(), Decision.Type.YES); routingTable = reroute.routingTable(); @@ -296,7 +296,7 @@ public class ThrottlingAllocationTests extends ESAllocationTestCase { assertEquals(clusterState.getRoutingNodes().getOutgoingRecoveries("node3"), 0); // outgoing throttles - reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", clusterState.getRoutingNodes().node("node3").iterator().next().shardId().id(), "node3", "node1")), true); + reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", clusterState.getRoutingNodes().node("node3").iterator().next().shardId().id(), "node3", "node1")), true, false); assertEquals(reroute.explanations().explanations().size(), 1); assertEquals(reroute.explanations().explanations().get(0).decisions().type(), Decision.Type.THROTTLE); assertEquals(clusterState.getRoutingNodes().getIncomingRecoveries("node1"), 0); @@ -311,7 +311,7 @@ public class ThrottlingAllocationTests extends ESAllocationTestCase { assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(0)); // incoming throttles - reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", clusterState.getRoutingNodes().node("node3").iterator().next().shardId().id(), "node3", "node2")), true); + reroute = strategy.reroute(clusterState, new AllocationCommands(new MoveAllocationCommand("test", 
clusterState.getRoutingNodes().node("node3").iterator().next().shardId().id(), "node3", "node2")), true, false); assertEquals(reroute.explanations().explanations().size(), 1); assertEquals(reroute.explanations().explanations().get(0).decisions().type(), Decision.Type.THROTTLE); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index 2c4e86ad4b1..579e87150a7 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -796,7 +796,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { AllocationCommand relocate1 = new MoveAllocationCommand("test", 0, "node2", "node3"); AllocationCommands cmds = new AllocationCommands(relocate1); - routingTable = strategy.reroute(clusterState, cmds).routingTable(); + routingTable = strategy.reroute(clusterState, cmds, false, false).routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); logShardStates(clusterState); @@ -808,7 +808,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { // node3, which will put it over the low watermark when it // completes, with shard relocations taken into account this should // throw an exception about not being able to complete - strategy.reroute(clusterState, cmds).routingTable(); + strategy.reroute(clusterState, cmds, false, false).routingTable(); fail("should not have been able to reroute the shard"); } catch (IllegalArgumentException e) { assertThat("can't allocated because there isn't enough room: " + e.getMessage(), @@ -876,7 +876,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { ); ClusterState clusterState = ClusterState.builder(baseClusterState).routingTable(builder.build()).build(); RoutingAllocation routingAllocation = new RoutingAllocation(null, new RoutingNodes(clusterState), clusterState, clusterInfo, - System.nanoTime()); + System.nanoTime(), false); Decision decision = diskThresholdDecider.canRemain(firstRouting, firstRoutingNode, routingAllocation); assertThat(decision.type(), equalTo(Decision.Type.NO)); @@ -896,7 +896,8 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { ) ); clusterState = ClusterState.builder(baseClusterState).routingTable(builder.build()).build(); - routingAllocation = new RoutingAllocation(null, new RoutingNodes(clusterState), clusterState, clusterInfo, System.nanoTime()); + routingAllocation = new RoutingAllocation(null, new RoutingNodes(clusterState), clusterState, clusterInfo, System.nanoTime(), + false); decision = diskThresholdDecider.canRemain(firstRouting, firstRoutingNode, routingAllocation); assertThat(decision.type(), equalTo(Decision.Type.YES)); @@ -992,7 +993,7 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { ); ClusterState clusterState = ClusterState.builder(baseClusterState).routingTable(builder.build()).build(); RoutingAllocation routingAllocation = new RoutingAllocation(null, new RoutingNodes(clusterState), clusterState, clusterInfo, - System.nanoTime()); + System.nanoTime(), false); Decision decision = diskThresholdDecider.canRemain(firstRouting, firstRoutingNode, routingAllocation); // Two shards should start happily @@ -1051,7 +1052,8 @@ public class DiskThresholdDeciderTests 
extends ESAllocationTestCase { ); clusterState = ClusterState.builder(updateClusterState).routingTable(builder.build()).build(); - routingAllocation = new RoutingAllocation(null, new RoutingNodes(clusterState), clusterState, clusterInfo, System.nanoTime()); + routingAllocation = new RoutingAllocation(null, new RoutingNodes(clusterState), clusterState, clusterInfo, System.nanoTime(), + false); decision = diskThresholdDecider.canRemain(firstRouting, firstRoutingNode, routingAllocation); assertThat(decision.type(), equalTo(Decision.Type.YES)); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java index d9e9fb95445..008884cbb8d 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java @@ -136,7 +136,7 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase { ImmutableOpenMap.Builder shardSizes = ImmutableOpenMap.builder(); shardSizes.put("[test][0][p]", 10L); // 10 bytes final ClusterInfo clusterInfo = new ClusterInfo(leastAvailableUsages.build(), mostAvailableUsage.build(), shardSizes.build(), ImmutableOpenMap.of()); - RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, new AllocationDecider[]{decider}), clusterState.getRoutingNodes(), clusterState, clusterInfo, System.nanoTime()); + RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, new AllocationDecider[]{decider}), clusterState.getRoutingNodes(), clusterState, clusterInfo, System.nanoTime(), false); assertEquals(mostAvailableUsage.toString(), Decision.YES, decider.canAllocate(test_0, new RoutingNode("node_0", node_0), allocation)); assertEquals(mostAvailableUsage.toString(), Decision.NO, decider.canAllocate(test_0, new RoutingNode("node_1", node_1), allocation)); } @@ -204,7 +204,7 @@ public class DiskThresholdDeciderUnitTests extends ESTestCase { shardSizes.put("[test][2][p]", 10L); final ClusterInfo clusterInfo = new ClusterInfo(leastAvailableUsages.build(), mostAvailableUsage.build(), shardSizes.build(), shardRoutingMap.build()); - RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, new AllocationDecider[]{decider}), clusterState.getRoutingNodes(), clusterState, clusterInfo, System.nanoTime()); + RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, new AllocationDecider[]{decider}), clusterState.getRoutingNodes(), clusterState, clusterInfo, System.nanoTime(), false); assertEquals(Decision.YES, decider.canRemain(test_0, new RoutingNode("node_0", node_0), allocation)); assertEquals(Decision.NO, decider.canRemain(test_1, new RoutingNode("node_1", node_1), allocation)); try { diff --git a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java index 7303d5a67df..81fc9d3752d 100644 --- a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java @@ -64,7 +64,7 @@ import java.util.concurrent.atomic.AtomicInteger; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; -import static 
org.elasticsearch.cluster.service.ClusterServiceUtils.setState; +import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; diff --git a/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java b/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java index 255693669c1..c9dd0e26361 100644 --- a/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java @@ -320,6 +320,24 @@ public class RoutingIteratorTests extends ESAllocationTestCase { assertThat(shardsIterator.size(), equalTo(1)); assertThat(shardsIterator.nextOrNull().currentNodeId(),equalTo("node2")); + shardsIterator = clusterState.routingTable().index("test").shard(0) + .onlyNodeSelectorActiveInitializingShardsIt(new String[] {"disk:eph*","disk:ebs"},clusterState.nodes()); + assertThat(shardsIterator.size(), equalTo(2)); + assertThat(shardsIterator.nextOrNull().currentNodeId(),equalTo("node2")); + assertThat(shardsIterator.nextOrNull().currentNodeId(),equalTo("node1")); + + shardsIterator = clusterState.routingTable().index("test").shard(0) + .onlyNodeSelectorActiveInitializingShardsIt(new String[] {"disk:*", "invalid_name"},clusterState.nodes()); + assertThat(shardsIterator.size(), equalTo(2)); + assertThat(shardsIterator.nextOrNull().currentNodeId(),equalTo("node2")); + assertThat(shardsIterator.nextOrNull().currentNodeId(),equalTo("node1")); + + shardsIterator = clusterState.routingTable().index("test").shard(0) + .onlyNodeSelectorActiveInitializingShardsIt(new String[] {"disk:*", "disk:*"},clusterState.nodes()); + assertThat(shardsIterator.size(), equalTo(2)); + assertThat(shardsIterator.nextOrNull().currentNodeId(),equalTo("node2")); + assertThat(shardsIterator.nextOrNull().currentNodeId(),equalTo("node1")); + try { shardsIterator = clusterState.routingTable().index("test").shard(0).onlyNodeSelectorActiveInitializingShardsIt("welma", clusterState.nodes()); fail("should have raised illegalArgumentException"); diff --git a/core/src/test/java/org/elasticsearch/common/Base64Tests.java b/core/src/test/java/org/elasticsearch/common/Base64Tests.java deleted file mode 100644 index 74691c0b739..00000000000 --- a/core/src/test/java/org/elasticsearch/common/Base64Tests.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.common; - -import org.elasticsearch.test.ESTestCase; - -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.Locale; - -import static org.hamcrest.Matchers.is; - -/** - * - */ -public class Base64Tests extends ESTestCase { - // issue #6334 - public void testBase64DecodeWithExtraCharactersAfterPadding() throws Exception { - String plain = randomAsciiOfLengthBetween(1, 20) + ":" + randomAsciiOfLengthBetween(1, 20); - String encoded = Base64.encodeBytes(plain.getBytes(StandardCharsets.UTF_8)); - assertValidBase64(encoded, plain); - - // lets append some trash here, if the encoded string has been padded - char lastChar = encoded.charAt(encoded.length() - 1); - if (lastChar == '=') { - assertInvalidBase64(encoded + randomAsciiOfLength(3)); - } - } - - private void assertValidBase64(String base64, String expected) throws IOException { - String decoded = new String(Base64.decode(base64.getBytes(StandardCharsets.UTF_8)), StandardCharsets.UTF_8); - assertThat(decoded, is(expected)); - } - - private void assertInvalidBase64(String base64) { - try { - Base64.decode(base64.getBytes(StandardCharsets.UTF_8)); - fail(String.format(Locale.ROOT, "Expected IOException to be thrown for string %s (len %d)", base64, base64.length())); - } catch (IOException e) {} - } -} diff --git a/core/src/test/java/org/elasticsearch/common/blobstore/BlobPathTests.java b/core/src/test/java/org/elasticsearch/common/blobstore/BlobPathTests.java new file mode 100644 index 00000000000..09225914644 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/blobstore/BlobPathTests.java @@ -0,0 +1,39 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.blobstore; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.is; + +public class BlobPathTests extends ESTestCase { + + public void testBuildAsString() { + BlobPath path = new BlobPath(); + assertThat(path.buildAsString(), is("")); + + path = path.add("a"); + assertThat(path.buildAsString(), is("a/")); + + path = path.add("b").add("c"); + assertThat(path.buildAsString(), is("a/b/c/")); + + } +} diff --git a/core/src/test/java/org/elasticsearch/common/joda/DateMathParserTests.java b/core/src/test/java/org/elasticsearch/common/joda/DateMathParserTests.java index 96a4a3fdf35..b1d0abc96cf 100644 --- a/core/src/test/java/org/elasticsearch/common/joda/DateMathParserTests.java +++ b/core/src/test/java/org/elasticsearch/common/joda/DateMathParserTests.java @@ -196,6 +196,8 @@ public class DateMathParserTests extends ESTestCase { public void testTimestamps() { assertDateMathEquals("1418248078000", "2014-12-10T21:47:58.000"); + assertDateMathEquals("32484216259000", "2999-05-20T17:24:19.000"); + assertDateMathEquals("253382837059000", "9999-05-20T17:24:19.000"); // datemath still works on timestamps assertDateMathEquals("1418248078000||/m", "2014-12-10T21:47:00.000"); diff --git a/core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java b/core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java index 5c812cca0a7..fabace237b2 100644 --- a/core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java +++ b/core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java @@ -27,6 +27,7 @@ import java.util.Arrays; import org.apache.log4j.Appender; import org.apache.log4j.Logger; +import org.apache.log4j.MDC; import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; diff --git a/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java b/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java index 08a4ba11342..61710b726d4 100644 --- a/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java +++ b/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java @@ -25,7 +25,6 @@ import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.ISODateTimeFormat; -import java.util.ArrayList; import java.util.concurrent.TimeUnit; import static org.hamcrest.Matchers.equalTo; @@ -234,19 +233,18 @@ public class TimeZoneRoundingTests extends ESTestCase { * amount of milliseconds. 
*/ public void testSubHourNextRoundingEquallySpaced() { - String timeZone = randomFrom(new ArrayList<>(DateTimeZone.getAvailableIDs())); DateTimeUnit unit = randomFrom(new DateTimeUnit[] { DateTimeUnit.HOUR_OF_DAY, DateTimeUnit.MINUTES_OF_HOUR, DateTimeUnit.SECOND_OF_MINUTE }); - DateTimeZone tz = DateTimeZone.forID(timeZone); - TimeZoneRounding rounding = new TimeZoneRounding.TimeUnitRounding(unit, tz); + DateTimeZone timezone = randomDateTimeZone(); + TimeZoneRounding rounding = new TimeZoneRounding.TimeUnitRounding(unit, timezone); // move the random date to transition for timezones that have offset change due to dst transition - long nextTransition = tz.nextTransition(Math.abs(randomLong() % ((long) 10e11))); + long nextTransition = timezone.nextTransition(Math.abs(randomLong() % ((long) 10e11))); final long millisPerUnit = unit.field().getDurationField().getUnitMillis(); // start ten units before transition long roundedDate = rounding.round(nextTransition - (10 * millisPerUnit)); while (roundedDate < nextTransition + 10 * millisPerUnit) { long delta = rounding.nextRoundingValue(roundedDate) - roundedDate; - assertEquals("Difference between rounded values not equally spaced for [" + unit.name() + "], [" + timeZone + "] at " + assertEquals("Difference between rounded values not equally spaced for [" + unit.name() + "], [" + timezone + "] at " + new DateTime(roundedDate), millisPerUnit, delta); roundedDate = rounding.nextRoundingValue(roundedDate); } diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index aa8614aee71..3afd60d86e4 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -302,11 +302,8 @@ public class ScopedSettingsTests extends ESTestCase { public void testLoggingUpdates() { final String level = ESLoggerFactory.getRootLogger().getLevel(); final String testLevel = ESLoggerFactory.getLogger("test").getLevel(); - String property = System.getProperty("es.logger.level"); - Settings.Builder builder = Settings.builder(); - if (property != null) { - builder.put("logger.level", property); - } + String property = randomFrom(ESLoggerFactory.LogLevel.values()).toString(); + Settings.Builder builder = Settings.builder().put("logger.level", property); try { ClusterSettings settings = new ClusterSettings(builder.build(), ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); try { @@ -319,7 +316,7 @@ public class ScopedSettingsTests extends ESTestCase { settings.applySettings(Settings.builder().put("logger._root", "TRACE").build()); assertEquals("TRACE", ESLoggerFactory.getRootLogger().getLevel()); settings.applySettings(Settings.builder().build()); - assertEquals(level, ESLoggerFactory.getRootLogger().getLevel()); + assertEquals(property, ESLoggerFactory.getRootLogger().getLevel()); settings.applySettings(Settings.builder().put("logger.test", "TRACE").build()); assertEquals("TRACE", ESLoggerFactory.getLogger("test").getLevel()); settings.applySettings(Settings.builder().build()); diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java index 3539e54d943..346c5bc60de 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java @@ -31,7 +31,9 @@ import 
java.util.Set; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -42,31 +44,30 @@ public class SettingsTests extends ESTestCase { String value = System.getProperty("java.home"); assertFalse(value.isEmpty()); Settings settings = Settings.builder() - .put("setting1", "${java.home}") + .put("property.placeholder", value) + .put("setting1", "${property.placeholder}") .replacePropertyPlaceholders() .build(); assertThat(settings.get("setting1"), equalTo(value)); - - assertNull(System.getProperty("_test_property_should_not_exist")); - settings = Settings.builder() - .put("setting1", "${_test_property_should_not_exist:defaultVal1}") - .replacePropertyPlaceholders() - .build(); - assertThat(settings.get("setting1"), equalTo("defaultVal1")); - - settings = Settings.builder() - .put("setting1", "${_test_property_should_not_exist:}") - .replacePropertyPlaceholders() - .build(); - assertThat(settings.get("setting1"), is(nullValue())); } - public void testReplacePropertiesPlaceholderIgnoreEnvUnset() { - Settings settings = Settings.builder() - .put("setting1", "${env.UNSET_ENV_VAR}") + public void testReplacePropertiesPlaceholderSystemVariablesHaveNoEffect() { + final String value = System.getProperty("java.home"); + assertNotNull(value); + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> Settings.builder() + .put("setting1", "${java.home}") .replacePropertyPlaceholders() - .build(); - assertThat(settings.get("setting1"), is(nullValue())); + .build()); + assertThat(e, hasToString(containsString("Could not resolve placeholder 'java.home'"))); + } + + public void testReplacePropertiesPlaceholderByEnvironmentVariables() { + final String hostname = randomAsciiOfLength(16); + final Settings implicitEnvSettings = Settings.builder() + .put("setting1", "${HOSTNAME}") + .replacePropertyPlaceholders(name -> "HOSTNAME".equals(name) ? 
hostname : null) + .build(); + assertThat(implicitEnvSettings.get("setting1"), equalTo(hostname)); } public void testReplacePropertiesPlaceholderIgnoresPrompt() { diff --git a/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java b/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java index 20568826d40..3cd68bea038 100644 --- a/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java +++ b/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java @@ -83,6 +83,9 @@ public class TimeValueTests extends ESTestCase { assertEquals(new TimeValue(10, TimeUnit.SECONDS), TimeValue.parseTimeValue("10S", null, "test")); + assertEquals(new TimeValue(100, TimeUnit.MILLISECONDS), + TimeValue.parseTimeValue("0.1s", null, "test")); + assertEquals(new TimeValue(10, TimeUnit.MINUTES), TimeValue.parseTimeValue("10 m", null, "test")); assertEquals(new TimeValue(10, TimeUnit.MINUTES), diff --git a/core/src/test/java/org/elasticsearch/deps/joda/SimpleJodaTests.java b/core/src/test/java/org/elasticsearch/deps/joda/SimpleJodaTests.java index c4775b6cbf2..595394dbfa9 100644 --- a/core/src/test/java/org/elasticsearch/deps/joda/SimpleJodaTests.java +++ b/core/src/test/java/org/elasticsearch/deps/joda/SimpleJodaTests.java @@ -289,16 +289,18 @@ public class SimpleJodaTests extends ESTestCase { formatter.parser().parseDateTime("-100000000"); formatter.parser().parseDateTime("-999999999999"); formatter.parser().parseDateTime("-1234567890123"); + formatter.parser().parseDateTime("-1234567890123456789"); } else { formatter.parser().parseDateTime("-100000000"); formatter.parser().parseDateTime("-1234567890"); + formatter.parser().parseDateTime("-1234567890123456"); } } public void testForInvalidDatesInEpochSecond() { FormatDateTimeFormatter formatter = Joda.forPattern("epoch_second"); try { - formatter.parser().parseDateTime(randomFrom("invalid date", "12345678901", "12345678901234")); + formatter.parser().parseDateTime(randomFrom("invalid date", "12345678901234567", "12345678901234567890")); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("Invalid format")); @@ -308,7 +310,7 @@ public class SimpleJodaTests extends ESTestCase { public void testForInvalidDatesInEpochMillis() { FormatDateTimeFormatter formatter = Joda.forPattern("epoch_millis"); try { - formatter.parser().parseDateTime(randomFrom("invalid date", "12345678901234")); + formatter.parser().parseDateTime(randomFrom("invalid date", "12345678901234567890")); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("Invalid format")); @@ -350,6 +352,8 @@ public class SimpleJodaTests extends ESTestCase { assertThat(dateTime.getMillis(), is(1234567890456L)); dateTime = formatter.parser().parseDateTime("1234567890789"); assertThat(dateTime.getMillis(), is(1234567890789L)); + dateTime = formatter.parser().parseDateTime("1234567890123456789"); + assertThat(dateTime.getMillis(), is(1234567890123456789L)); FormatDateTimeFormatter secondsFormatter = Joda.forPattern("epoch_second"); DateTime secondsDateTime = secondsFormatter.parser().parseDateTime("1234567890"); @@ -358,6 +362,8 @@ public class SimpleJodaTests extends ESTestCase { assertThat(secondsDateTime.getMillis(), is(1234567890000L)); secondsDateTime = secondsFormatter.parser().parseDateTime("1234567890"); assertThat(secondsDateTime.getMillis(), is(1234567890000L)); + secondsDateTime = 
secondsFormatter.parser().parseDateTime("1234567890123456"); + assertThat(secondsDateTime.getMillis(), is(1234567890123456000L)); } public void testThatDefaultFormatterChecksForCorrectYearLength() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java index 1108e8335e9..d3da4513d95 100644 --- a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java @@ -58,8 +58,8 @@ import java.util.concurrent.TimeUnit; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static java.util.Collections.singleton; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java index 3a81b905eb5..b1e3ae157a7 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java @@ -65,8 +65,8 @@ import java.util.concurrent.atomic.AtomicReference; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static java.util.Collections.shuffle; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; diff --git a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java index 01e717e6fa9..ee1cf7280e7 100644 --- a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java @@ -346,7 +346,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { .metaData(metaData) .routingTable(routingTable) .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); - return new RoutingAllocation(allocationDeciders, new RoutingNodes(state, false), state, null, System.nanoTime()); + return new RoutingAllocation(allocationDeciders, new RoutingNodes(state, false), state, null, System.nanoTime(), false); } /** @@ -425,7 +425,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { .metaData(metaData) .routingTable(routingTable) .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); - return new RoutingAllocation(allocationDeciders, new RoutingNodes(state, false), state, null, System.nanoTime()); + return new RoutingAllocation(allocationDeciders, new RoutingNodes(state, false), state, null, System.nanoTime(), false); } /** @@ -444,7 +444,7 @@ public class PrimaryShardAllocatorTests 
extends ESAllocationTestCase { .routingTable(routingTable) .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); - RoutingAllocation allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime()); + RoutingAllocation allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime(), false); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); @@ -452,7 +452,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas testAllocator.addData(node1, 1, null, randomBoolean()); - allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime()); + allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime(), false); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); @@ -460,7 +460,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas testAllocator.addData(node2, 1, null, randomBoolean()); - allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime()); + allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime(), false); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(0)); @@ -485,7 +485,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { .routingTable(routingTable) .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); - RoutingAllocation allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime()); + RoutingAllocation allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime(), false); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); @@ -493,7 +493,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas testAllocator.addData(node1, 1, null, randomBoolean()); - allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime()); + allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime(), false); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); @@ -501,7 +501,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { 
assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas testAllocator.addData(node2, 2, null, randomBoolean()); - allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime()); + allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime(), false); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(0)); @@ -525,7 +525,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { .metaData(metaData) .routingTable(routingTableBuilder.build()) .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); - return new RoutingAllocation(deciders, new RoutingNodes(state, false), state, null, System.nanoTime()); + return new RoutingAllocation(deciders, new RoutingNodes(state, false), state, null, System.nanoTime(), false); } class TestAllocator extends PrimaryShardAllocator { diff --git a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java index 672c9de3d3e..20eb6286813 100644 --- a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java @@ -302,7 +302,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { .metaData(metaData) .routingTable(routingTable) .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); - return new RoutingAllocation(deciders, new RoutingNodes(state, false), state, ClusterInfo.EMPTY, System.nanoTime()); + return new RoutingAllocation(deciders, new RoutingNodes(state, false), state, ClusterInfo.EMPTY, System.nanoTime(), false); } private RoutingAllocation onePrimaryOnNode1And1ReplicaRecovering(AllocationDeciders deciders) { @@ -324,7 +324,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { .metaData(metaData) .routingTable(routingTable) .nodes(DiscoveryNodes.builder().put(node1).put(node2).put(node3)).build(); - return new RoutingAllocation(deciders, new RoutingNodes(state, false), state, ClusterInfo.EMPTY, System.nanoTime()); + return new RoutingAllocation(deciders, new RoutingNodes(state, false), state, ClusterInfo.EMPTY, System.nanoTime(), false); } class TestAllocator extends ReplicaShardAllocator { diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpRequestSizeLimitIT.java b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpRequestSizeLimitIT.java index 632df308e99..eeda96743df 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpRequestSizeLimitIT.java +++ b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpRequestSizeLimitIT.java @@ -29,6 +29,7 @@ import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.jboss.netty.handler.codec.http.HttpResponse; import org.jboss.netty.handler.codec.http.HttpResponseStatus; @@ -53,6 +54,7 @@ public class NettyHttpRequestSizeLimitIT extends ESIntegTestCase { .build(); } + 
@TestLogging("_root:DEBUG,org.elasticsearch.common.breaker:TRACE,org.elasticsearch.test:TRACE,org.elasticsearch.transport:TRACE") public void testLimitsInFlightRequests() throws Exception { ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java index 7beeaaee0a6..49e5d0c4595 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -36,7 +36,7 @@ import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.cluster.service.ClusterServiceUtils; +import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ -88,8 +88,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; -import static java.util.Collections.emptyMap; - public class IndexModuleTests extends ESTestCase { private Index index; private Settings settings; diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CharMatcherTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CharMatcherTests.java new file mode 100644 index 00000000000..31f80a66cda --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/analysis/CharMatcherTests.java @@ -0,0 +1,71 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.analysis; + +import org.elasticsearch.test.ESTestCase; + +public class CharMatcherTests extends ESTestCase { + + public void testLetter() { + assertTrue(CharMatcher.Basic.LETTER.isTokenChar('a')); // category Ll + assertTrue(CharMatcher.Basic.LETTER.isTokenChar('é')); // category Ll + assertTrue(CharMatcher.Basic.LETTER.isTokenChar('A')); // category Lu + assertTrue(CharMatcher.Basic.LETTER.isTokenChar('Å')); // category Lu + assertTrue(CharMatcher.Basic.LETTER.isTokenChar('ʰ')); // category Lm + assertTrue(CharMatcher.Basic.LETTER.isTokenChar('ª')); // category Lo + assertTrue(CharMatcher.Basic.LETTER.isTokenChar('Dž')); // category Lt + assertFalse(CharMatcher.Basic.LETTER.isTokenChar(' ')); + assertFalse(CharMatcher.Basic.LETTER.isTokenChar('0')); + assertFalse(CharMatcher.Basic.LETTER.isTokenChar('!')); + } + + public void testSpace() { + assertTrue(CharMatcher.Basic.WHITESPACE.isTokenChar(' ')); + assertTrue(CharMatcher.Basic.WHITESPACE.isTokenChar('\t')); + assertFalse(CharMatcher.Basic.WHITESPACE.isTokenChar('\u00A0')); // nbsp + } + + public void testNumber() { + assertTrue(CharMatcher.Basic.DIGIT.isTokenChar('1')); + assertTrue(CharMatcher.Basic.DIGIT.isTokenChar('١')); // ARABIC-INDIC DIGIT ONE + assertFalse(CharMatcher.Basic.DIGIT.isTokenChar(',')); + assertFalse(CharMatcher.Basic.DIGIT.isTokenChar('a')); + } + + public void testSymbol() { + assertTrue(CharMatcher.Basic.SYMBOL.isTokenChar('$')); // category Sc + assertTrue(CharMatcher.Basic.SYMBOL.isTokenChar('+')); // category Sm + assertTrue(CharMatcher.Basic.SYMBOL.isTokenChar('`')); // category Sk + assertTrue(CharMatcher.Basic.SYMBOL.isTokenChar('^')); // category Sk + assertTrue(CharMatcher.Basic.SYMBOL.isTokenChar('¦')); // category So + assertFalse(CharMatcher.Basic.SYMBOL.isTokenChar(' ')); + } + + public void testPunctuation() { + assertTrue(CharMatcher.Basic.PUNCTUATION.isTokenChar('(')); // category Ps + assertTrue(CharMatcher.Basic.PUNCTUATION.isTokenChar(')')); // category Pe + assertTrue(CharMatcher.Basic.PUNCTUATION.isTokenChar('_')); // category Pc + assertTrue(CharMatcher.Basic.PUNCTUATION.isTokenChar('!')); // category Po + assertTrue(CharMatcher.Basic.PUNCTUATION.isTokenChar('-')); // category Pd + assertTrue(CharMatcher.Basic.PUNCTUATION.isTokenChar('«')); // category Pi + assertTrue(CharMatcher.Basic.PUNCTUATION.isTokenChar('»')); // category Pf + assertFalse(CharMatcher.Basic.PUNCTUATION.isTokenChar(' ')); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/analysis/FingerprintAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/FingerprintAnalyzerTests.java index 8c1d530e448..ac412207021 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/FingerprintAnalyzerTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/FingerprintAnalyzerTests.java @@ -26,13 +26,13 @@ import org.elasticsearch.test.ESTokenStreamTestCase; public class FingerprintAnalyzerTests extends ESTokenStreamTestCase { public void testFingerprint() throws Exception { - Analyzer a = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, ' ', 255, false); + Analyzer a = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, ' ', 255); assertAnalyzesTo(a, "foo bar@baz Baz $ foo foo FOO. 
FoO", new String[]{"bar baz foo"}); } public void testReusableTokenStream() throws Exception { - Analyzer a = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, ' ', 255, false); + Analyzer a = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, ' ', 255); assertAnalyzesTo(a, "foo bar baz Baz foo foo FOO. FoO", new String[]{"bar baz foo"}); assertAnalyzesTo(a, "xyz XYZ abc 123.2 abc", @@ -40,7 +40,7 @@ public class FingerprintAnalyzerTests extends ESTokenStreamTestCase { } public void testAsciifolding() throws Exception { - Analyzer a = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, ' ', 255, false); + Analyzer a = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, ' ', 255); assertAnalyzesTo(a, "gödel escher bach", new String[]{"bach escher godel"}); @@ -48,14 +48,8 @@ public class FingerprintAnalyzerTests extends ESTokenStreamTestCase { new String[]{"bach escher godel"}); } - public void testPreserveOriginal() throws Exception { - Analyzer a = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, ' ', 255, true); - assertAnalyzesTo(a, "gödel escher bach", - new String[]{"bach escher godel gödel"}); - } - public void testLimit() throws Exception { - Analyzer a = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, ' ', 3, false); + Analyzer a = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, ' ', 3); assertAnalyzesTo(a, "e d c b a", new String[]{}); @@ -63,10 +57,4 @@ public class FingerprintAnalyzerTests extends ESTokenStreamTestCase { new String[]{"a b"}); } - public void testSeparator() throws Exception { - Analyzer a = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, '_', 255, true); - assertAnalyzesTo(a, "b c a", - new String[]{"a_b_c"}); - } - } diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 0e20c22e119..6a84b62fabd 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -54,7 +54,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.Base64; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -111,6 +110,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; +import java.util.Base64; import java.util.Collections; import java.util.List; import java.util.Locale; @@ -829,7 +829,7 @@ public class InternalEngineTests extends ESTestCase { engine.index(new Engine.Index(newUid("1"), doc)); Engine.CommitId commitID = engine.flush(); assertThat(commitID, equalTo(new Engine.CommitId(store.readLastCommittedSegmentsInfo().getId()))); - byte[] wrongBytes = Base64.decode(commitID.toString()); + byte[] wrongBytes = Base64.getDecoder().decode(commitID.toString()); wrongBytes[0] = (byte) ~wrongBytes[0]; Engine.CommitId wrongId = new Engine.CommitId(wrongBytes); assertEquals("should fail to sync flush with wrong id (but no docs)", engine.syncFlush(syncId + "1", wrongId), diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java index 7f9c4a3bbfe..26399218829 100644 --- 
a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java @@ -46,7 +46,7 @@ import java.util.Collections; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.hamcrest.CoreMatchers.instanceOf; public class DynamicMappingDisabledTests extends ESSingleNodeTestCase { diff --git a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorQueryCacheTests.java b/core/src/test/java/org/elasticsearch/index/percolator/PercolatorQueryCacheTests.java deleted file mode 100644 index 30986e45e4b..00000000000 --- a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorQueryCacheTests.java +++ /dev/null @@ -1,390 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.percolator; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.MockAnalyzer; -import org.apache.lucene.analysis.core.SimpleAnalyzer; -import org.apache.lucene.document.BinaryDocValuesField; -import org.apache.lucene.document.Document; -import org.apache.lucene.document.Field; -import org.apache.lucene.document.StoredField; -import org.apache.lucene.document.StringField; -import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.NoMergePolicy; -import org.apache.lucene.index.Term; -import org.apache.lucene.index.TieredMergePolicy; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.WildcardQuery; -import org.apache.lucene.store.Directory; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.env.Environment; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.IndexSettings; 
-import org.elasticsearch.index.IndexWarmer; -import org.elasticsearch.index.analysis.AnalysisRegistry; -import org.elasticsearch.index.analysis.AnalysisService; -import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.mapper.DocumentFieldMappers; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.internal.SourceFieldMapper; -import org.elasticsearch.index.mapper.internal.TypeFieldMapper; -import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.PercolateQuery; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryParser; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.query.TermQueryBuilder; -import org.elasticsearch.index.query.WildcardQueryBuilder; -import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.similarity.SimilarityService; -import org.elasticsearch.index.warmer.ShardIndexWarmerService; -import org.elasticsearch.indices.IndicesModule; -import org.elasticsearch.indices.mapper.MapperRegistry; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.IndexSettingsModule; -import org.elasticsearch.threadpool.ThreadPool; - -import java.io.IOException; -import java.util.Collections; - -import static org.elasticsearch.index.query.QueryBuilders.boolQuery; -import static org.elasticsearch.index.query.QueryBuilders.termQuery; -import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery; -import static org.hamcrest.Matchers.equalTo; -import static org.mockito.Matchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class PercolatorQueryCacheTests extends ESTestCase { - - private QueryShardContext queryShardContext; - private PercolatorQueryCache cache; - - void initialize(Object... 
fields) throws IOException { - Settings settings = Settings.builder() - .put("node.name", PercolatorQueryCacheTests.class.toString()) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .build(); - - IndicesQueriesRegistry indicesQueriesRegistry = new IndicesQueriesRegistry(); - QueryParser termParser = TermQueryBuilder::fromXContent; - indicesQueriesRegistry.register(termParser, TermQueryBuilder.QUERY_NAME_FIELD); - QueryParser wildcardParser = WildcardQueryBuilder::fromXContent; - indicesQueriesRegistry.register(wildcardParser, WildcardQueryBuilder.QUERY_NAME_FIELD); - QueryParser boolQueryParser = BoolQueryBuilder::fromXContent; - indicesQueriesRegistry.register(boolQueryParser, BoolQueryBuilder.QUERY_NAME_FIELD); - - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("_index", ClusterState.UNKNOWN_UUID), indexSettings); - SimilarityService similarityService = new SimilarityService(idxSettings, Collections.emptyMap()); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); - MapperRegistry mapperRegistry = new IndicesModule().getMapperRegistry(); - MapperService mapperService = new MapperService(idxSettings, analysisService, similarityService, mapperRegistry, - () -> queryShardContext); - mapperService.merge("type", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("type", fields).string()), - MapperService.MergeReason.MAPPING_UPDATE, false); - cache = new PercolatorQueryCache(idxSettings, () -> queryShardContext); - ClusterState state = ClusterState.builder(new ClusterName("_name")).build(); - queryShardContext = new QueryShardContext(idxSettings, null, null, mapperService, similarityService, null, - indicesQueriesRegistry, null, cache, null, state); - } - - public void testLoadQueries() throws Exception { - Directory directory = newDirectory(); - IndexWriter indexWriter = new IndexWriter( - directory, - new IndexWriterConfig(new MockAnalyzer(random())) - .setMergePolicy(NoMergePolicy.INSTANCE) - ); - - boolean legacyFormat = randomBoolean(); - Version version = legacyFormat ? 
Version.V_2_0_0 : Version.CURRENT; - IndexShard indexShard = mockIndexShard(version, legacyFormat); - - storeQuery("0", indexWriter, termQuery("field1", "value1"), true, legacyFormat); - storeQuery("1", indexWriter, wildcardQuery("field1", "v*"), true, legacyFormat); - storeQuery("2", indexWriter, boolQuery().must(termQuery("field1", "value1")).must(termQuery("field2", "value2")), - true, legacyFormat); - // dummy docs should be skipped during loading: - Document doc = new Document(); - doc.add(new StringField("dummy", "value", Field.Store.YES)); - indexWriter.addDocument(doc); - storeQuery("4", indexWriter, termQuery("field2", "value2"), true, legacyFormat); - // only documents of the .percolator type should be loaded: - storeQuery("5", indexWriter, termQuery("field2", "value2"), false, legacyFormat); - storeQuery("6", indexWriter, termQuery("field3", "value3"), true, legacyFormat); - indexWriter.forceMerge(1); - - // also include queries for percolator docs marked as deleted: - indexWriter.deleteDocuments(new Term("id", "6")); - indexWriter.close(); - - ShardId shardId = new ShardId("_index", ClusterState.UNKNOWN_UUID, 0); - IndexReader indexReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory), shardId); - assertThat(indexReader.leaves().size(), equalTo(1)); - assertThat(indexReader.numDeletedDocs(), equalTo(1)); - assertThat(indexReader.maxDoc(), equalTo(7)); - - initialize("field1", "type=keyword", "field2", "type=keyword", "field3", "type=keyword"); - - PercolatorQueryCache.QueriesLeaf leaf = cache.loadQueries(indexReader.leaves().get(0), indexShard); - assertThat(leaf.queries.size(), equalTo(5)); - assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("field1", "value1")))); - assertThat(leaf.getQuery(1), equalTo(new WildcardQuery(new Term("field1", "v*")))); - assertThat(leaf.getQuery(2), equalTo(new BooleanQuery.Builder() - .add(new TermQuery(new Term("field1", "value1")), BooleanClause.Occur.MUST) - .add(new TermQuery(new Term("field2", "value2")), BooleanClause.Occur.MUST) - .build() - )); - assertThat(leaf.getQuery(4), equalTo(new TermQuery(new Term("field2", "value2")))); - assertThat(leaf.getQuery(6), equalTo(new TermQuery(new Term("field3", "value3")))); - - indexReader.close(); - directory.close(); - } - - public void testGetQueries() throws Exception { - Directory directory = newDirectory(); - IndexWriter indexWriter = new IndexWriter( - directory, - new IndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE) - ); - - storeQuery("0", indexWriter, termQuery("a", "0"), true, false); - storeQuery("1", indexWriter, termQuery("a", "1"), true, false); - storeQuery("2", indexWriter, termQuery("a", "2"), true, false); - indexWriter.flush(); - storeQuery("3", indexWriter, termQuery("a", "3"), true, false); - storeQuery("4", indexWriter, termQuery("a", "4"), true, false); - storeQuery("5", indexWriter, termQuery("a", "5"), true, false); - indexWriter.flush(); - storeQuery("6", indexWriter, termQuery("a", "6"), true, false); - storeQuery("7", indexWriter, termQuery("a", "7"), true, false); - storeQuery("8", indexWriter, termQuery("a", "8"), true, false); - indexWriter.flush(); - indexWriter.close(); - - ShardId shardId = new ShardId("_index", ClusterState.UNKNOWN_UUID, 0); - IndexReader indexReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory), shardId); - assertThat(indexReader.leaves().size(), equalTo(3)); - assertThat(indexReader.maxDoc(), equalTo(9)); - - initialize("a", "type=keyword"); - - try { -
cache.getQueries(indexReader.leaves().get(0)); - fail("IllegalStateException expected"); - } catch (IllegalStateException e) { - assertThat(e.getMessage(), equalTo("queries not loaded, queries should be have been preloaded during index warming...")); - } - - IndexShard indexShard = mockIndexShard(Version.CURRENT, false); - ThreadPool threadPool = mockThreadPool(); - IndexWarmer.Listener listener = cache.createListener(threadPool); - listener.warmReader(indexShard, new Engine.Searcher("test", new IndexSearcher(indexReader))); - PercolatorQueryCacheStats stats = cache.getStats(shardId); - assertThat(stats.getNumQueries(), equalTo(9L)); - - PercolateQuery.QueryRegistry.Leaf leaf = cache.getQueries(indexReader.leaves().get(0)); - assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "0")))); - assertThat(leaf.getQuery(1), equalTo(new TermQuery(new Term("a", "1")))); - assertThat(leaf.getQuery(2), equalTo(new TermQuery(new Term("a", "2")))); - - leaf = cache.getQueries(indexReader.leaves().get(1)); - assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "3")))); - assertThat(leaf.getQuery(1), equalTo(new TermQuery(new Term("a", "4")))); - assertThat(leaf.getQuery(2), equalTo(new TermQuery(new Term("a", "5")))); - - leaf = cache.getQueries(indexReader.leaves().get(2)); - assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "6")))); - assertThat(leaf.getQuery(1), equalTo(new TermQuery(new Term("a", "7")))); - assertThat(leaf.getQuery(2), equalTo(new TermQuery(new Term("a", "8")))); - - indexReader.close(); - directory.close(); - } - - public void testInvalidateEntries() throws Exception { - Directory directory = newDirectory(); - IndexWriter indexWriter = new IndexWriter( - directory, - new IndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE) - ); - - storeQuery("0", indexWriter, termQuery("a", "0"), true, false); - indexWriter.flush(); - storeQuery("1", indexWriter, termQuery("a", "1"), true, false); - indexWriter.flush(); - storeQuery("2", indexWriter, termQuery("a", "2"), true, false); - indexWriter.flush(); - - ShardId shardId = new ShardId("_index", ClusterState.UNKNOWN_UUID, 0); - IndexReader indexReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(indexWriter), shardId); - assertThat(indexReader.leaves().size(), equalTo(3)); - assertThat(indexReader.maxDoc(), equalTo(3)); - - initialize("a", "type=keyword"); - - IndexShard indexShard = mockIndexShard(Version.CURRENT, false); - ThreadPool threadPool = mockThreadPool(); - IndexWarmer.Listener listener = cache.createListener(threadPool); - listener.warmReader(indexShard, new Engine.Searcher("test", new IndexSearcher(indexReader))); - assertThat(cache.getStats(shardId).getNumQueries(), equalTo(3L)); - - PercolateQuery.QueryRegistry.Leaf leaf = cache.getQueries(indexReader.leaves().get(0)); - assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "0")))); - - leaf = cache.getQueries(indexReader.leaves().get(1)); - assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "1")))); - - leaf = cache.getQueries(indexReader.leaves().get(2)); - assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "2")))); - - // change merge policy, so that merges will actually happen: - indexWriter.getConfig().setMergePolicy(new TieredMergePolicy()); - indexWriter.deleteDocuments(new Term("id", "1")); - indexWriter.forceMergeDeletes(); - indexReader.close(); - indexReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(indexWriter), shardId); - 
assertThat(indexReader.leaves().size(), equalTo(2)); - assertThat(indexReader.maxDoc(), equalTo(2)); - listener.warmReader(indexShard, new Engine.Searcher("test", new IndexSearcher(indexReader))); - assertThat(cache.getStats(shardId).getNumQueries(), equalTo(2L)); - - leaf = cache.getQueries(indexReader.leaves().get(0)); - assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "0")))); - - leaf = cache.getQueries(indexReader.leaves().get(1)); - assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "2")))); - - indexWriter.forceMerge(1); - indexReader.close(); - indexReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(indexWriter), shardId); - assertThat(indexReader.leaves().size(), equalTo(1)); - assertThat(indexReader.maxDoc(), equalTo(2)); - listener.warmReader(indexShard, new Engine.Searcher("test", new IndexSearcher(indexReader))); - assertThat(cache.getStats(shardId).getNumQueries(), equalTo(2L)); - - leaf = cache.getQueries(indexReader.leaves().get(0)); - assertThat(leaf.getQuery(0), equalTo(new TermQuery(new Term("a", "0")))); - assertThat(leaf.getQuery(1), equalTo(new TermQuery(new Term("a", "2")))); - - indexWriter.close(); - indexReader.close(); - directory.close(); - } - - void storeQuery(String id, IndexWriter indexWriter, QueryBuilder queryBuilder, boolean typeField, boolean legacy) throws IOException { - Document doc = new Document(); - doc.add(new StringField("id", id, Field.Store.NO)); - if (typeField) { - if (legacy) { - doc.add(new StringField(TypeFieldMapper.NAME, PercolatorFieldMapper.LEGACY_TYPE_NAME, Field.Store.NO)); - } else { - doc.add(new StringField(TypeFieldMapper.NAME, "query", Field.Store.NO)); - } - } - if (legacy) { - BytesReference percolatorQuery = XContentFactory.jsonBuilder().startObject() - .field("query", queryBuilder) - .endObject().bytes(); - doc.add(new StoredField( - SourceFieldMapper.NAME, - percolatorQuery.array(), percolatorQuery.arrayOffset(), percolatorQuery.length()) - ); - } else { - BytesRef queryBuilderAsBytes = new BytesRef( - XContentFactory.contentBuilder(PercolatorQueryCache.QUERY_BUILDER_CONTENT_TYPE).value(queryBuilder).bytes().toBytes() - ); - doc.add(new BinaryDocValuesField(PercolatorFieldMapper.QUERY_BUILDER_FIELD_NAME, queryBuilderAsBytes)); - } - indexWriter.addDocument(doc); - } - - IndexShard mockIndexShard(Version version, boolean legacyFormat) { - IndexShard indexShard = mock(IndexShard.class); - ShardIndexWarmerService shardIndexWarmerService = mock(ShardIndexWarmerService.class); - when(shardIndexWarmerService.logger()).thenReturn(logger); - when(indexShard.warmerService()).thenReturn(shardIndexWarmerService); - IndexSettings indexSettings = new IndexSettings( - IndexMetaData.builder("_index").settings(Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexMetaData.SETTING_VERSION_CREATED, version) - ).build(), - Settings.EMPTY - ); - when(indexShard.indexSettings()).thenReturn(indexSettings); - - PercolatorFieldMapper.PercolatorFieldType fieldType = mock(PercolatorFieldMapper.PercolatorFieldType.class); - when(fieldType.name()).thenReturn("query"); - when(fieldType.getQueryBuilderFieldName()).thenReturn(PercolatorFieldMapper.QUERY_BUILDER_FIELD_NAME); - PercolatorFieldMapper percolatorFieldMapper = mock(PercolatorFieldMapper.class); - when(percolatorFieldMapper.fieldType()).thenReturn(fieldType); - MapperService mapperService = mock(MapperService.class); - DocumentMapper documentMapper = 
mock(DocumentMapper.class); - if (legacyFormat) { - when(documentMapper.type()).thenReturn(PercolatorFieldMapper.LEGACY_TYPE_NAME); - when(documentMapper.typeFilter()) - .thenReturn(new TermQuery(new Term(TypeFieldMapper.NAME, PercolatorFieldMapper.LEGACY_TYPE_NAME))); - } else { - when(documentMapper.type()).thenReturn("query"); - when(documentMapper.typeFilter()).thenReturn(new TermQuery(new Term(TypeFieldMapper.NAME, "query"))); - } - - Analyzer analyzer = new SimpleAnalyzer(); - DocumentFieldMappers documentFieldMappers = - new DocumentFieldMappers(Collections.singleton(percolatorFieldMapper), analyzer, analyzer, analyzer); - when(documentMapper.mappers()).thenReturn(documentFieldMappers); - - when(mapperService.docMappers(false)).thenReturn(Collections.singleton(documentMapper)); - - when(indexShard.mapperService()).thenReturn(mapperService); - - return indexShard; - } - - ThreadPool mockThreadPool() { - ThreadPool threadPool = mock(ThreadPool.class); - when(threadPool.executor(anyString())).thenReturn(Runnable::run); - return threadPool; - } - -} diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractTermQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractTermQueryTestCase.java index 0fe35530234..a2eec493c9c 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractTermQueryTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractTermQueryTestCase.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.query; import com.fasterxml.jackson.core.io.JsonStringEncoder; +import org.elasticsearch.test.AbstractQueryTestCase; import java.util.HashMap; import java.util.Map; diff --git a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java index a49d410fdd6..15180b9d989 100644 --- a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.AbstractQueryTestCase; import org.hamcrest.Matchers; import java.io.IOException; diff --git a/core/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java index ecdadeca923..34c15d63577 100644 --- a/core/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.queries.BoostingQuery; import org.apache.lucene.search.Query; +import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; diff --git a/core/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java index d7a48195343..0b293cd0157 100644 --- a/core/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.queries.ExtendedCommonTermsQuery; import 
org.apache.lucene.search.Query; +import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -106,21 +107,21 @@ public class CommonTermsQueryBuilderTests extends AbstractQueryTestCase public void testFromJson() throws IOException { String json = - "{\n" + - " \"ids\" : {\n" + - " \"type\" : [ \"my_type\" ],\n" + - " \"values\" : [ \"1\", \"100\", \"4\" ],\n" + - " \"boost\" : 1.0\n" + - " }\n" + + "{\n" + + " \"ids\" : {\n" + + " \"type\" : [ \"my_type\" ],\n" + + " \"values\" : [ \"1\", \"100\", \"4\" ],\n" + + " \"boost\" : 1.0\n" + + " }\n" + "}"; IdsQueryBuilder parsed = (IdsQueryBuilder) parseQuery(json); checkGeneratedJson(json, parsed); diff --git a/core/src/test/java/org/elasticsearch/index/query/IndicesQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/IndicesQueryBuilderTests.java index c234016c465..12527a927c4 100644 --- a/core/src/test/java/org/elasticsearch/index/query/IndicesQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/IndicesQueryBuilderTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.Query; +import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -77,27 +78,27 @@ public class IndicesQueryBuilderTests extends AbstractQueryTestCase new MatchQueryBuilder(null, "value")); + assertEquals("[match] requires fieldName", e.getMessage()); } - try { - new MatchQueryBuilder("fieldName", null); - fail("value must not be non-null"); - } catch (IllegalArgumentException ex) { - // expected + { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new MatchQueryBuilder("fieldName", null)); + assertEquals("[match] requires query value", e.getMessage()); } MatchQueryBuilder matchQuery = new MatchQueryBuilder("fieldName", "text"); - try { - matchQuery.prefixLength(-1); - fail("must not be positive"); - } catch (IllegalArgumentException ex) { - // expected + { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.prefixLength(-1)); + assertEquals("[match] requires prefix length to be non-negative.", e.getMessage()); } - try { - matchQuery.maxExpansions(-1); - fail("must not be positive"); - } catch (IllegalArgumentException ex) { - // expected + { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> matchQuery.maxExpansions(randomIntBetween(-10, 0))); + assertEquals("[match] requires maxExpansions to be positive.", e.getMessage()); } - try { - matchQuery.operator(null); - fail("must not be non-null"); - } catch (IllegalArgumentException ex) { - // expected + { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.operator(null)); + assertEquals("[match] requires operator to be non-null", e.getMessage()); } - try { - matchQuery.type(null); - fail("must not be non-null"); - } catch (IllegalArgumentException ex) { - // expected + { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.type(null)); + assertEquals("[match] requires type to be non-null", e.getMessage()); } - try { - matchQuery.zeroTermsQuery(null); - fail("must not be non-null"); - } catch (IllegalArgumentException ex) { - // expected + { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> matchQuery.zeroTermsQuery(null)); + assertEquals("[match] requires zeroTermsQuery to be non-null", e.getMessage()); } - } - public void testBadAnalyzer() throws IOException { - 
MatchQueryBuilder matchQuery = new MatchQueryBuilder("fieldName", "text"); matchQuery.analyzer("bogusAnalyzer"); - try { - matchQuery.toQuery(createShardContext()); - fail("Expected QueryShardException"); - } catch (QueryShardException e) { + { + QueryShardException e = expectThrows(QueryShardException.class, () -> matchQuery.toQuery(createShardContext())); assertThat(e.getMessage(), containsString("analyzer [bogusAnalyzer] not found")); } } diff --git a/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java index 31a11e36b0a..e4b0a6893ac 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java @@ -41,6 +41,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item; +import org.elasticsearch.test.AbstractQueryTestCase; import org.junit.Before; import java.io.IOException; @@ -304,30 +305,30 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase { @Override - public void setUp() throws Exception { - super.setUp(); - MapperService mapperService = createShardContext().getMapperService(); + protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { mapperService.merge("nested_doc", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("nested_doc", STRING_FIELD_NAME, "type=text", INT_FIELD_NAME, "type=integer", diff --git a/core/src/test/java/org/elasticsearch/index/query/ParentIdQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/ParentIdQueryBuilderTests.java index 070fb08dc67..89d0829e012 100644 --- a/core/src/test/java/org/elasticsearch/index/query/ParentIdQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/ParentIdQueryBuilderTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; +import org.elasticsearch.test.AbstractQueryTestCase; import org.hamcrest.Matchers; import java.io.IOException; @@ -42,25 +43,23 @@ public class ParentIdQueryBuilderTests extends AbstractQueryTestCase new SpanNearQueryBuilder(null, 1)); + assertEquals("[span_near] must include at least one clause", e.getMessage()); - try { SpanNearQueryBuilder spanNearQueryBuilder = new SpanNearQueryBuilder(new SpanTermQueryBuilder("field", "value"), 1); - spanNearQueryBuilder.clause(null); - fail("cannot be null"); - } catch (IllegalArgumentException e) { - // expected - } + e = expectThrows(IllegalArgumentException.class, () -> spanNearQueryBuilder.addClause(null)); + assertEquals("[span_near] clauses cannot be null", e.getMessage()); + } + + public void testClausesUnmodifiable() { + SpanNearQueryBuilder spanNearQueryBuilder = new SpanNearQueryBuilder(new SpanTermQueryBuilder("field", "value"), 1); + expectThrows(UnsupportedOperationException.class, + () -> spanNearQueryBuilder.clauses().add(new SpanTermQueryBuilder("field", "value2"))); } public void testFromJson() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java
b/core/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java index 527b7e5e83a..ccfda9596f3 100644 --- a/core/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/SpanNotQueryBuilderTests.java @@ -24,6 +24,7 @@ import org.apache.lucene.search.spans.SpanNotQuery; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -110,7 +111,7 @@ public class SpanNotQueryBuilderTests extends AbstractQueryTestCase new SpanOrQueryBuilder((SpanQueryBuilder) null)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new SpanOrQueryBuilder((SpanQueryBuilder) null)); + assertEquals("[span_or] must include at least one clause", e.getMessage()); - try { - SpanOrQueryBuilder spanOrBuilder = new SpanOrQueryBuilder(new SpanTermQueryBuilder("field", "value")); - spanOrBuilder.clause(null); - fail("cannot be null"); - } catch (IllegalArgumentException e) { - // expected - } + SpanOrQueryBuilder spanOrBuilder = new SpanOrQueryBuilder(new SpanTermQueryBuilder("field", "value")); + e = expectThrows(IllegalArgumentException.class, () -> spanOrBuilder.addClause(null)); + assertEquals("[span_or] inner clause cannot be null", e.getMessage()); + } + + public void testClausesUnmodifiable() { + SpanNearQueryBuilder spanNearQueryBuilder = new SpanNearQueryBuilder(new SpanTermQueryBuilder("field", "value"), 1); + expectThrows(UnsupportedOperationException.class, + () -> spanNearQueryBuilder.clauses().add(new SpanTermQueryBuilder("field", "value2"))); } public void testFromJson() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/query/SpanWithinQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SpanWithinQueryBuilderTests.java index 3b7723be000..a05a2a1af81 100644 --- a/core/src/test/java/org/elasticsearch/index/query/SpanWithinQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/SpanWithinQueryBuilderTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.Query; import org.apache.lucene.search.spans.SpanWithinQuery; +import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; diff --git a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java index ee6621bef5b..d0548fd2780 100644 --- a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java @@ -28,7 +28,8 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.script.Script.ScriptParseException; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.Template; -import org.junit.BeforeClass; +import org.elasticsearch.test.AbstractQueryTestCase; +import org.junit.Before; import java.io.IOException; import java.util.Collections; @@ -40,10 +41,10 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase { - @BeforeClass - public static void registerTestRandomScoreFunction() { - getSearchModule().registerScoreFunction(RandomScoreFunctionBuilderWithFixedSeed::new, - RandomScoreFunctionBuilderWithFixedSeed::fromXContent, 
RandomScoreFunctionBuilderWithFixedSeed.FUNCTION_NAME_FIELD); + + @Override + protected Collection> getPlugins() { + return Collections.singleton(TestPlugin.class); } @Override @@ -731,4 +736,23 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase errors = new CopyOnWriteArrayList<>(); logger.debug("using [{}] readers. [{}] writers. flushing every ~[{}] ops.", readers.length, writers.length, flushEveryOps); for (int i = 0; i < writers.length; i++) { - final String threadId = "writer_" + i; + final String threadName = "writer_" + i; + final int threadId = i; writers[i] = new Thread(new AbstractRunnable() { @Override public void doRun() throws BrokenBarrierException, InterruptedException, IOException { @@ -629,18 +629,21 @@ public class TranslogTests extends ESTestCase { if (existing != null) { fail("duplicate op [" + op + "], old entry at " + location); } + if (id % writers.length == threadId) { + translog.ensureSynced(location); + } writtenOpsLatch.get().countDown(); counter++; } - logger.debug("--> [{}] done. wrote [{}] ops.", threadId, counter); + logger.debug("--> [{}] done. wrote [{}] ops.", threadName, counter); } @Override public void onFailure(Throwable t) { - logger.error("--> writer [{}] had an error", t, threadId); + logger.error("--> writer [{}] had an error", t, threadName); errors.add(t); } - }, threadId); + }, threadName); writers[i].start(); } @@ -1262,12 +1265,12 @@ public class TranslogTests extends ESTestCase { case CREATE: case INDEX: op = new Translog.Index("test", threadId + "_" + opCount, - randomUnicodeOfLengthBetween(1, 20 * 1024).getBytes("UTF-8")); + randomUnicodeOfLengthBetween(1, 20 * 1024).getBytes("UTF-8")); break; case DELETE: op = new Translog.Delete(new Term("_uid", threadId + "_" + opCount), - 1 + randomInt(100000), - randomFrom(VersionType.values())); + 1 + randomInt(100000), + randomFrom(VersionType.values())); break; default: throw new ElasticsearchException("not supported op type"); diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java index 3be16393e2a..e0446fe329b 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java @@ -36,10 +36,6 @@ import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequestBui import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequestBuilder; -import org.elasticsearch.action.percolate.MultiPercolateRequestBuilder; -import org.elasticsearch.action.percolate.MultiPercolateResponse; -import org.elasticsearch.action.percolate.PercolateRequestBuilder; -import org.elasticsearch.action.percolate.PercolateSourceBuilder; import org.elasticsearch.action.search.MultiSearchRequestBuilder; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchRequestBuilder; @@ -57,7 +53,6 @@ import org.elasticsearch.test.ESIntegTestCase; import java.util.Collection; import java.util.function.Function; -import static org.elasticsearch.action.percolate.PercolateSourceBuilder.docBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; @@ -75,7 +70,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { } public void testSpecifiedIndexUnavailableMultipleIndices() throws Exception { - assertAcked(prepareCreate("test1").addMapping("query", "query", "type=percolator")); + assertAcked(prepareCreate("test1")); ensureYellow(); // Verify defaults @@ -90,8 +85,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(validateQuery("test1", "test2"), true); verify(aliasExists("test1", "test2"), true); verify(typesExists("test1", "test2"), true); - verify(percolate("test1", "test2"), true); - verify(mpercolate(null, "test1", "test2"), false); verify(getAliases("test1", "test2"), true); verify(getFieldMapping("test1", "test2"), true); verify(getMapping("test1", "test2"), true); @@ -109,8 +102,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(validateQuery("test1", "test2").setIndicesOptions(options), true); verify(aliasExists("test1", "test2").setIndicesOptions(options), true); verify(typesExists("test1", "test2").setIndicesOptions(options), true); - verify(percolate("test1", "test2").setIndicesOptions(options), true); - verify(mpercolate(options, "test1", "test2").setIndicesOptions(options), false); verify(getAliases("test1", "test2").setIndicesOptions(options), true); verify(getFieldMapping("test1", "test2").setIndicesOptions(options), true); verify(getMapping("test1", "test2").setIndicesOptions(options), true); @@ -128,15 +119,13 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(validateQuery("test1", "test2").setIndicesOptions(options), false); verify(aliasExists("test1", "test2").setIndicesOptions(options), false); verify(typesExists("test1", "test2").setIndicesOptions(options), false); - verify(percolate("test1", "test2").setIndicesOptions(options), false); - verify(mpercolate(options, "test1", "test2").setIndicesOptions(options), false); verify(getAliases("test1", "test2").setIndicesOptions(options), false); verify(getFieldMapping("test1", "test2").setIndicesOptions(options), false); verify(getMapping("test1", "test2").setIndicesOptions(options), false); verify(getSettings("test1", "test2").setIndicesOptions(options), false); options = IndicesOptions.strictExpandOpen(); - assertAcked(prepareCreate("test2").addMapping("query", "query", "type=percolator")); + assertAcked(prepareCreate("test2")); ensureYellow(); verify(search("test1", "test2").setIndicesOptions(options), false); verify(msearch(options, "test1", "test2").setIndicesOptions(options), false); @@ -149,8 +138,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(validateQuery("test1", "test2").setIndicesOptions(options), false); verify(aliasExists("test1", "test2").setIndicesOptions(options), false); verify(typesExists("test1", "test2").setIndicesOptions(options), false); - verify(percolate("test1", "test2").setIndicesOptions(options), false); - verify(mpercolate(options, "test1", "test2").setIndicesOptions(options), false); verify(getAliases("test1", "test2").setIndicesOptions(options), false); verify(getFieldMapping("test1", "test2").setIndicesOptions(options), false); verify(getMapping("test1", "test2").setIndicesOptions(options), false); @@ -158,7 +145,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { } public void testSpecifiedIndexUnavailableSingleIndexThatIsClosed() throws Exception { - assertAcked(prepareCreate("test1").addMapping("query", "query", 
"type=percolator")); + assertAcked(prepareCreate("test1")); // we need to wait until all shards are allocated since recovery from // gateway will fail unless the majority of the replicas was allocated // pre-closing. with lots of replicas this will fail. @@ -178,8 +165,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(validateQuery("test1").setIndicesOptions(options), true); verify(aliasExists("test1").setIndicesOptions(options), true); verify(typesExists("test1").setIndicesOptions(options), true); - verify(percolate("test1").setIndicesOptions(options), true); - verify(mpercolate(options, "test1").setIndicesOptions(options), true); verify(getAliases("test1").setIndicesOptions(options), true); verify(getFieldMapping("test1").setIndicesOptions(options), true); verify(getMapping("test1").setIndicesOptions(options), true); @@ -197,8 +182,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(validateQuery("test1").setIndicesOptions(options), false); verify(aliasExists("test1").setIndicesOptions(options), false); verify(typesExists("test1").setIndicesOptions(options), false); - verify(percolate("test1").setIndicesOptions(options), false); - verify(mpercolate(options, "test1").setIndicesOptions(options), false); verify(getAliases("test1").setIndicesOptions(options), false); verify(getFieldMapping("test1").setIndicesOptions(options), false); verify(getMapping("test1").setIndicesOptions(options), false); @@ -219,8 +202,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(validateQuery("test1").setIndicesOptions(options), false); verify(aliasExists("test1").setIndicesOptions(options), false); verify(typesExists("test1").setIndicesOptions(options), false); - verify(percolate("test1").setIndicesOptions(options), false); - verify(mpercolate(options, "test1").setIndicesOptions(options), false); verify(getAliases("test1").setIndicesOptions(options), false); verify(getFieldMapping("test1").setIndicesOptions(options), false); verify(getMapping("test1").setIndicesOptions(options), false); @@ -240,7 +221,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(validateQuery("test1").setIndicesOptions(options), true); verify(aliasExists("test1").setIndicesOptions(options), true); verify(typesExists("test1").setIndicesOptions(options), true); - verify(percolate("test1").setIndicesOptions(options), true); verify(getAliases("test1").setIndicesOptions(options), true); verify(getFieldMapping("test1").setIndicesOptions(options), true); verify(getMapping("test1").setIndicesOptions(options), true); @@ -258,13 +238,12 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(validateQuery("test1").setIndicesOptions(options), false); verify(aliasExists("test1").setIndicesOptions(options), false); verify(typesExists("test1").setIndicesOptions(options), false); - verify(percolate("test1").setIndicesOptions(options), false); verify(getAliases("test1").setIndicesOptions(options), false); verify(getFieldMapping("test1").setIndicesOptions(options), false); verify(getMapping("test1").setIndicesOptions(options), false); verify(getSettings("test1").setIndicesOptions(options), false); - assertAcked(prepareCreate("test1").addMapping("query", "query", "type=percolator")); + assertAcked(prepareCreate("test1")); ensureYellow(); options = IndicesOptions.strictExpandOpenAndForbidClosed(); @@ -279,7 +258,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { 
verify(validateQuery("test1").setIndicesOptions(options), false); verify(aliasExists("test1").setIndicesOptions(options), false); verify(typesExists("test1").setIndicesOptions(options), false); - verify(percolate("test1").setIndicesOptions(options), false); verify(getAliases("test1").setIndicesOptions(options), false); verify(getFieldMapping("test1").setIndicesOptions(options), false); verify(getMapping("test1").setIndicesOptions(options), false); @@ -330,8 +308,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(validateQuery(indices), true); verify(aliasExists(indices), false); verify(typesExists(indices), false); - verify(percolate(indices), false); - verify(mpercolate(null, indices), false); verify(getAliases(indices), false); verify(getFieldMapping(indices), false); verify(getMapping(indices), false); @@ -350,14 +326,12 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(validateQuery(indices).setIndicesOptions(options), false); verify(aliasExists(indices).setIndicesOptions(options), false); verify(typesExists(indices).setIndicesOptions(options), false); - verify(percolate(indices).setIndicesOptions(options), false); - verify(mpercolate(options, indices), false); verify(getAliases(indices).setIndicesOptions(options), false); verify(getFieldMapping(indices).setIndicesOptions(options), false); verify(getMapping(indices).setIndicesOptions(options), false); verify(getSettings(indices).setIndicesOptions(options), false); - assertAcked(prepareCreate("foobar").addMapping("query", "query", "type=percolator")); + assertAcked(prepareCreate("foobar")); client().prepareIndex("foobar", "type", "1").setSource("k", "v").setRefresh(true).execute().actionGet(); // Verify defaults for wildcards, with one wildcard expression and one existing index @@ -373,8 +347,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(validateQuery(indices), false); verify(aliasExists(indices), false); verify(typesExists(indices), false); - verify(percolate(indices), false); - verify(mpercolate(null, indices), false); verify(getAliases(indices), false); verify(getFieldMapping(indices), false); verify(getMapping(indices), false); @@ -393,8 +365,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(validateQuery(indices), true); verify(aliasExists(indices), false); verify(typesExists(indices), false); - verify(percolate(indices), false); - verify(mpercolate(null, indices), false); verify(getAliases(indices), false); verify(getFieldMapping(indices), false); verify(getMapping(indices), false); @@ -413,8 +383,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(validateQuery(indices).setIndicesOptions(options), false); verify(aliasExists(indices).setIndicesOptions(options), false); verify(typesExists(indices).setIndicesOptions(options), false); - verify(percolate(indices).setIndicesOptions(options), false); - verify(mpercolate(options, indices), false); verify(getAliases(indices).setIndicesOptions(options), false); verify(getFieldMapping(indices).setIndicesOptions(options), false); verify(getMapping(indices).setIndicesOptions(options), false); @@ -724,20 +692,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { return client().admin().indices().prepareTypesExists(indices).setTypes("dummy"); } - private static PercolateRequestBuilder percolate(String... 
indices) { - return client().preparePercolate().setIndices(indices) - .setSource(new PercolateSourceBuilder().setDoc(docBuilder().setDoc("k", "v"))) - .setDocumentType("type"); - } - - private static MultiPercolateRequestBuilder mpercolate(IndicesOptions options, String... indices) { - MultiPercolateRequestBuilder builder = client().prepareMultiPercolate(); - if (options != null) { - builder.setIndicesOptions(options); - } - return builder.add(percolate(indices)); - } - private static GetAliasesRequestBuilder getAliases(String... indices) { return client().admin().indices().prepareGetAliases("dummy").addIndices(indices); } @@ -776,11 +730,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { assertThat(multiSearchResponse.getResponses().length, equalTo(1)); assertThat(multiSearchResponse.getResponses()[0].isFailure(), is(true)); assertThat(multiSearchResponse.getResponses()[0].getResponse(), nullValue()); - } else if (requestBuilder instanceof MultiPercolateRequestBuilder) { - MultiPercolateResponse multiPercolateResponse = ((MultiPercolateRequestBuilder) requestBuilder).get(); - assertThat(multiPercolateResponse.getItems().length, equalTo(1)); - assertThat(multiPercolateResponse.getItems()[0].isFailure(), is(true)); - assertThat(multiPercolateResponse.getItems()[0].getResponse(), nullValue()); } else { try { requestBuilder.get(); diff --git a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java index dc533737886..81c50cc4f9c 100644 --- a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java @@ -103,7 +103,7 @@ public class RareClusterStateIT extends ESIntegTestCase { .nodes(DiscoveryNodes.EMPTY_NODES) .build(), false ); - RoutingAllocation routingAllocation = new RoutingAllocation(allocationDeciders, routingNodes, current, ClusterInfo.EMPTY, System.nanoTime()); + RoutingAllocation routingAllocation = new RoutingAllocation(allocationDeciders, routingNodes, current, ClusterInfo.EMPTY, System.nanoTime(), false); allocator.allocateUnassigned(routingAllocation); } diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index 4716e7dba78..92c3260aeb0 100644 --- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -670,7 +670,7 @@ public class IndexStatsIT extends ESIntegTestCase { public void testFlagOrdinalOrder() { Flag[] flags = new Flag[]{Flag.Store, Flag.Indexing, Flag.Get, Flag.Search, Flag.Merge, Flag.Flush, Flag.Refresh, - Flag.QueryCache, Flag.FieldData, Flag.Docs, Flag.Warmer, Flag.PercolatorCache, Flag.Completion, Flag.Segments, + Flag.QueryCache, Flag.FieldData, Flag.Docs, Flag.Warmer, Flag.Completion, Flag.Segments, Flag.Translog, Flag.Suggest, Flag.RequestCache, Flag.Recovery}; assertThat(flags.length, equalTo(Flag.values().length)); @@ -913,9 +913,6 @@ public class IndexStatsIT extends ESIntegTestCase { case Warmer: builder.setWarmer(set); break; - case PercolatorCache: - builder.setPercolate(set); - break; case Completion: builder.setCompletion(set); break; @@ -963,8 +960,6 @@ public class IndexStatsIT extends ESIntegTestCase { return response.getStore() != null; case Warmer: return response.getWarmer() != null; - case PercolatorCache: - return response.getPercolatorCache() 
!= null; case Completion: return response.getCompletion() != null; case Segments: diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java index cf28de64b87..f877ae6629b 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java @@ -50,7 +50,7 @@ import java.util.concurrent.TimeUnit; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.elasticsearch.Version.CURRENT; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.VersionUtils.randomVersion; /** diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index e5f20499435..111133a0521 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.bulk.BulkItemResponse; @@ -154,7 +155,8 @@ public class IngestClientIT extends ESIntegTestCase { BulkItemResponse itemResponse = response.getItems()[i]; if (i % 2 == 0) { BulkItemResponse.Failure failure = itemResponse.getFailure(); - assertThat(failure.getMessage(), equalTo("java.lang.IllegalArgumentException: test processor failed")); + ElasticsearchException compoundProcessorException = (ElasticsearchException) failure.getCause(); + assertThat(compoundProcessorException.getRootCause().getMessage(), equalTo("test processor failed")); } else { IndexResponse indexResponse = itemResponse.getResponse(); assertThat("Expected a successful response but found failure [" + itemResponse.getFailure() + "].", diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java index 3c0de328c8c..254057d2ede 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.bulk.BulkRequest; @@ -188,6 +189,8 @@ public class PipelineExecutionServiceTests extends ESTestCase { public void testExecuteSuccessWithOnFailure() throws Exception { Processor processor = mock(Processor.class); + when(processor.getType()).thenReturn("mock_processor_type"); + when(processor.getTag()).thenReturn("mock_processor_tag"); Processor onFailureProcessor = mock(Processor.class); CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.singletonList(processor), Collections.singletonList(new CompoundProcessor(onFailureProcessor))); when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", compoundProcessor)); @@ -198,7 +201,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { @SuppressWarnings("unchecked") Consumer completionHandler = 
mock(Consumer.class); executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler); - verify(failureHandler, never()).accept(any(RuntimeException.class)); + verify(failureHandler, never()).accept(any(ElasticsearchException.class)); verify(completionHandler, times(1)).accept(true); } diff --git a/core/src/test/java/org/elasticsearch/ingest/core/CompoundProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/core/CompoundProcessorTests.java index 7bc8922af41..b4ee7eca07c 100644 --- a/core/src/test/java/org/elasticsearch/ingest/core/CompoundProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/core/CompoundProcessorTests.java @@ -19,21 +19,17 @@ package org.elasticsearch.ingest.core; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ingest.TestProcessor; -import org.elasticsearch.ingest.TestTemplateService; -import org.elasticsearch.ingest.processor.AppendProcessor; -import org.elasticsearch.ingest.processor.SetProcessor; -import org.elasticsearch.ingest.processor.SplitProcessor; import org.elasticsearch.test.ESTestCase; import org.junit.Before; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Map; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; public class CompoundProcessorTests extends ESTestCase { @@ -70,8 +66,8 @@ public class CompoundProcessorTests extends ESTestCase { try { compoundProcessor.execute(ingestDocument); fail("should throw exception"); - } catch (Exception e) { - assertThat(e.getMessage(), equalTo("error")); + } catch (ElasticsearchException e) { + assertThat(e.getRootCause().getMessage(), equalTo("error")); } assertThat(processor.getInvokedCounter(), equalTo(1)); } @@ -117,4 +113,68 @@ public class CompoundProcessorTests extends ESTestCase { assertThat(processorToFail.getInvokedCounter(), equalTo(1)); assertThat(lastProcessor.getInvokedCounter(), equalTo(1)); } + + public void testCompoundProcessorExceptionFailWithoutOnFailure() throws Exception { + TestProcessor firstProcessor = new TestProcessor("id1", "first", ingestDocument -> {throw new RuntimeException("error");}); + TestProcessor secondProcessor = new TestProcessor("id3", "second", ingestDocument -> { + Map ingestMetadata = ingestDocument.getIngestMetadata(); + assertThat(ingestMetadata.entrySet(), hasSize(3)); + assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error")); + assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("first")); + assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("id1")); + }); + + CompoundProcessor failCompoundProcessor = new CompoundProcessor(firstProcessor); + + CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.singletonList(failCompoundProcessor), + Collections.singletonList(secondProcessor)); + compoundProcessor.execute(ingestDocument); + + assertThat(firstProcessor.getInvokedCounter(), equalTo(1)); + assertThat(secondProcessor.getInvokedCounter(), equalTo(1)); + } + + public void testCompoundProcessorExceptionFail() throws Exception { + TestProcessor firstProcessor = new TestProcessor("id1", "first", ingestDocument -> {throw new RuntimeException("error");}); + TestProcessor failProcessor = new TestProcessor("tag_fail", "fail", ingestDocument -> {throw new RuntimeException("custom error message");}); + TestProcessor 
secondProcessor = new TestProcessor("id3", "second", ingestDocument -> { + Map ingestMetadata = ingestDocument.getIngestMetadata(); + assertThat(ingestMetadata.entrySet(), hasSize(3)); + assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("custom error message")); + assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("fail")); + assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("tag_fail")); + }); + + CompoundProcessor failCompoundProcessor = new CompoundProcessor(Collections.singletonList(firstProcessor), + Collections.singletonList(failProcessor)); + + CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.singletonList(failCompoundProcessor), + Collections.singletonList(secondProcessor)); + compoundProcessor.execute(ingestDocument); + + assertThat(firstProcessor.getInvokedCounter(), equalTo(1)); + assertThat(secondProcessor.getInvokedCounter(), equalTo(1)); + } + + public void testCompoundProcessorExceptionFailInOnFailure() throws Exception { + TestProcessor firstProcessor = new TestProcessor("id1", "first", ingestDocument -> {throw new RuntimeException("error");}); + TestProcessor failProcessor = new TestProcessor("tag_fail", "fail", ingestDocument -> {throw new RuntimeException("custom error message");}); + TestProcessor secondProcessor = new TestProcessor("id3", "second", ingestDocument -> { + Map ingestMetadata = ingestDocument.getIngestMetadata(); + assertThat(ingestMetadata.entrySet(), hasSize(3)); + assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("custom error message")); + assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("fail")); + assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("tag_fail")); + }); + + CompoundProcessor failCompoundProcessor = new CompoundProcessor(Collections.singletonList(firstProcessor), + Collections.singletonList(new CompoundProcessor(failProcessor))); + + CompoundProcessor compoundProcessor = new CompoundProcessor(Collections.singletonList(failCompoundProcessor), + Collections.singletonList(secondProcessor)); + compoundProcessor.execute(ingestDocument); + + assertThat(firstProcessor.getInvokedCounter(), equalTo(1)); + assertThat(secondProcessor.getInvokedCounter(), equalTo(1)); + } } diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java index 1eecb1397d3..68b5d175509 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/DateProcessorFactoryTests.java @@ -120,7 +120,7 @@ public class DateProcessorFactoryTests extends ESTestCase { config.put("field", sourceField); config.put("formats", Collections.singletonList("dd/MM/yyyyy")); - DateTimeZone timezone = randomTimezone(); + DateTimeZone timezone = randomDateTimeZone(); config.put("timezone", timezone.getID()); DateProcessor processor = factory.create(config); assertThat(processor.getTimezone(), equalTo(timezone)); @@ -141,14 +141,6 @@ public class DateProcessorFactoryTests extends ESTestCase { } } - //we generate a timezone out of the available ones in joda, some available in the jdk are not available in joda by default - private static DateTimeZone randomTimezone() { - List ids = new ArrayList<>(DateTimeZone.getAvailableIDs()); - 
Collections.sort(ids); - return DateTimeZone.forID(randomFrom(ids)); - } - - public void testParseMatchFormats() throws Exception { DateProcessor.Factory factory = new DateProcessor.Factory(); Map config = new HashMap<>(); diff --git a/core/src/test/java/org/elasticsearch/ingest/processor/TrackingResultProcessorTests.java b/core/src/test/java/org/elasticsearch/ingest/processor/TrackingResultProcessorTests.java index e53eec56cf1..2b53a9d08bc 100644 --- a/core/src/test/java/org/elasticsearch/ingest/processor/TrackingResultProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/processor/TrackingResultProcessorTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.ingest.processor; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ingest.SimulateProcessorResult; import org.elasticsearch.ingest.TestProcessor; import org.elasticsearch.ingest.core.CompoundProcessor; @@ -73,8 +74,9 @@ public class TrackingResultProcessorTests extends ESTestCase { try { trackingProcessor.execute(ingestDocument); - } catch (Exception e) { - assertThat(e.getMessage(), equalTo(exception.getMessage())); + fail("processor should throw exception"); + } catch (ElasticsearchException e) { + assertThat(e.getRootCause().getMessage(), equalTo(exception.getMessage())); } SimulateProcessorResult expectedFirstResult = new SimulateProcessorResult(testProcessor.getTag(), ingestDocument); @@ -121,8 +123,8 @@ public class TrackingResultProcessorTests extends ESTestCase { metadata = resultList.get(3).getIngestDocument().getIngestMetadata(); assertThat(metadata.get(ON_FAILURE_MESSAGE_FIELD), equalTo("fail")); - assertThat(metadata.get(ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("compound")); - assertThat(metadata.get(ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("CompoundProcessor-fail-success-success-fail")); + assertThat(metadata.get(ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("test")); + assertThat(metadata.get(ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("fail")); assertThat(resultList.get(3).getFailure(), nullValue()); assertThat(resultList.get(3).getProcessorTag(), equalTo(expectedSuccessResult.getProcessorTag())); } diff --git a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java index e1397ca47f1..87abc20a0de 100644 --- a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java +++ b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java @@ -19,11 +19,6 @@ package org.elasticsearch.node.internal; -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.Files; -import java.nio.file.Path; - import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.settings.Settings; @@ -33,6 +28,11 @@ import org.elasticsearch.test.ESTestCase; import org.junit.After; import org.junit.Before; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; + import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -134,7 +134,6 @@ public class InternalSettingsPreparerTests extends ESTestCase { Files.createDirectory(config); Files.copy(garbage, config.resolve("elasticsearch.yml")); InternalSettingsPreparer.prepareEnvironment(Settings.builder() - .put("config.ignore_system_properties", true) .put(baseEnvSettings) .build(), null); } catch (SettingsException e) { 
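The ingest test changes above (IngestClientIT, PipelineExecutionServiceTests, CompoundProcessorTests, TrackingResultProcessorTests) all track one behavioral change: a processor failure now surfaces wrapped in an ElasticsearchException, and the assertions move from the wrapper's message to its root cause. Below is a minimal, JDK-only sketch of that wrap-and-unwrap contract; the class name is a stand-in and the "mock_processor_tag"/"mock_processor_type" strings merely echo the mocks above, so none of this is the Elasticsearch implementation itself.

--------------------------------
// RootCauseDemo.java -- hypothetical stand-in for the ElasticsearchException
// root-cause unwrapping that the rewritten assertions rely on.
public class RootCauseDemo {

    // Walk the cause chain to its deepest element, the way the tests'
    // getRootCause() calls are expected to behave.
    static Throwable rootCause(Throwable t) {
        Throwable current = t;
        while (current.getCause() != null && current.getCause() != current) {
            current = current.getCause();
        }
        return current;
    }

    public static void main(String[] args) {
        try {
            try {
                // the processor's original failure
                throw new RuntimeException("test processor failed");
            } catch (RuntimeException original) {
                // the pipeline wraps it, adding processor context before rethrowing
                throw new RuntimeException(
                        "processor [mock_processor_tag] of type [mock_processor_type] failed", original);
            }
        } catch (RuntimeException wrapped) {
            // assert on the root cause, not on the wrapper's message
            System.out.println(rootCause(wrapped).getMessage()); // prints: test processor failed
        }
    }
}
--------------------------------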
@@ -153,7 +152,6 @@ public class InternalSettingsPreparerTests extends ESTestCase { try { InternalSettingsPreparer.prepareEnvironment(Settings.builder() - .put("config.ignore_system_properties", true) .put(baseEnvSettings) .build(), null); } catch (SettingsException e) { diff --git a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java index 7783f9cbf83..bc9909b21c1 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java @@ -22,6 +22,7 @@ package org.elasticsearch.recovery; import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.procedures.IntProcedure; import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.util.English; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; @@ -52,6 +53,7 @@ import org.elasticsearch.test.BackgroundIndexer; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; +import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.MockIndexEventListener; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; @@ -71,12 +73,15 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.startsWith; @@ -428,6 +433,62 @@ public class RelocationIT extends ESIntegTestCase { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/18553") + public void testIndexAndRelocateConcurrently() throws ExecutionException, InterruptedException { + Settings blueSetting = Settings.builder().put("node.attr.color", "blue").build(); + InternalTestCluster.Async> blueFuture = internalCluster().startNodesAsync(blueSetting, blueSetting); + Settings redSetting = Settings.builder().put("node.attr.color", "red").build(); + InternalTestCluster.Async> redFuture = internalCluster().startNodesAsync(redSetting, redSetting); + blueFuture.get(); + redFuture.get(); + logger.info("blue nodes: {}", blueFuture.get()); + logger.info("red nodes: {}", redFuture.get()); + ensureStableCluster(4); + + assertAcked(prepareCreate("test").setSettings(Settings.builder() + .put("index.routing.allocation.exclude.color", "blue") + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .put(indexSettings()))); + ensureYellow(); + assertAllShardsOnNodes("test", redFuture.get().toArray(new String[2])); + int numDocs = randomIntBetween(100, 150); + ArrayList ids = new ArrayList<>(); + logger.info(" --> indexing [{}] docs", numDocs); + 
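// first batch: these docs are indexed while every shard copy is still pinned to the red nodes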
IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs]; + for (int i = 0; i < numDocs; i++) { + String id = randomRealisticUnicodeOfLength(10) + String.valueOf(i); + ids.add(id); + docs[i] = client().prepareIndex("test", "type1", id).setSource("field1", English.intToEnglish(i)); + } + indexRandom(true, docs); + SearchResponse countResponse = client().prepareSearch("test").get(); + assertHitCount(countResponse, numDocs); + + logger.info(" --> moving index to new nodes"); + Settings build = Settings.builder().put("index.routing.allocation.exclude.color", "red") + .put("index.routing.allocation.include.color", "blue").build(); + client().admin().indices().prepareUpdateSettings("test").setSettings(build).execute().actionGet(); + + // index while relocating + logger.info(" --> indexing [{}] more docs", numDocs); + for (int i = 0; i < numDocs; i++) { + String id = randomRealisticUnicodeOfLength(10) + String.valueOf(numDocs + i); + ids.add(id); + docs[i] = client().prepareIndex("test", "type1", id).setSource("field1", English.intToEnglish(numDocs + i)); + } + indexRandom(true, docs); + numDocs *= 2; + + logger.info(" --> waiting for relocation to complete"); + ensureGreen("test"); // move all shards to the new node (it waits on relocation) + final int numIters = randomIntBetween(10, 20); + for (int i = 0; i < numIters; i++) { + SearchResponse afterRelocation = client().prepareSearch().setSize(ids.size()).get(); + assertNoFailures(afterRelocation); + assertSearchHits(afterRelocation, ids.toArray(new String[ids.size()])); + } + } + class RecoveryCorruption extends MockTransportService.DelegateTransport { private final CountDownLatch corruptionCount; diff --git a/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java b/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java index 306f3813e9b..aa3b11e6250 100644 --- a/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java +++ b/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.ParsingException; import org.elasticsearch.index.Index; +import org.elasticsearch.rest.support.RestUtils; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; @@ -35,8 +36,11 @@ import java.io.IOException; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; /** * @@ -147,6 +151,32 @@ public class BytesRestResponseTests extends ESTestCase { assertTrue(stackTrace.contains("Caused by: ParsingException[foobar]")); } + public void testResponseWhenPathContainsEncodingError() throws IOException { + final String path = "%a"; + final RestRequest request = mock(RestRequest.class); + when(request.rawPath()).thenReturn(path); + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestUtils.decodeComponent(request.rawPath())); + final RestChannel channel = new DetailedExceptionRestChannel(request); + // if we try to decode the path, this will throw an IllegalArgumentException again + final BytesRestResponse response = new BytesRestResponse(channel, e); +
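// building the response must not trip over the undecodable path again; the failure is rendered below as a 400 illegal_argument_exception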
assertNotNull(response.content()); + final String content = response.content().toUtf8(); + assertThat(content, containsString("\"type\":\"illegal_argument_exception\"")); + assertThat(content, containsString("\"reason\":\"partial escape sequence at end of string: %a\"")); + assertThat(content, containsString("\"status\":" + 400)); + } + + public void testResponseWhenInternalServerError() throws IOException { + final RestRequest request = new FakeRestRequest(); + final RestChannel channel = new DetailedExceptionRestChannel(request); + final BytesRestResponse response = new BytesRestResponse(channel, new ElasticsearchException("simulated")); + assertNotNull(response.content()); + final String content = response.content().toUtf8(); + assertThat(content, containsString("\"type\":\"exception\"")); + assertThat(content, containsString("\"reason\":\"simulated\"")); + assertThat(content, containsString("\"status\":" + 500)); + } + public static class WithHeadersException extends ElasticsearchException { WithHeadersException() { diff --git a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java index e5f78d7474e..fefe55b9ec3 100644 --- a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.script; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.script.MockScriptEngine.MockCompiledScript; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.test.ESTestCase; import java.nio.file.Files; @@ -46,7 +45,7 @@ public class FileScriptTests extends ESTestCase { .put(settings) .build(); Set engines = new HashSet<>(Collections.singletonList(new MockScriptEngine())); - ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, MockScriptEngine.NAME, ScriptMode.ON))); + ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, MockScriptEngine.NAME, true))); ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList()); ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry); return new ScriptService(settings, new Environment(settings), engines, null, scriptEngineRegistry, scriptContextRegistry, scriptSettings); diff --git a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java index 045f62ff8fe..5e1dc740f9e 100644 --- a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.env.Environment; import org.elasticsearch.env.EnvironmentModule; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -75,10 +74,10 @@ public class NativeScriptTests extends ESTestCase { Settings.Builder builder = Settings.builder(); if (randomBoolean()) { ScriptType scriptType = randomFrom(ScriptType.values()); - 
builder.put("script" + "." + scriptType.getScriptType(), randomFrom(ScriptMode.values())); + builder.put("script" + "." + scriptType.getScriptType(), randomBoolean()); } else { ScriptContext scriptContext = randomFrom(ScriptContext.Standard.values()); - builder.put("script" + "." + scriptContext.getKey(), randomFrom(ScriptMode.values())); + builder.put("script" + "." + scriptContext.getKey(), randomBoolean()); } Settings settings = builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); Environment environment = new Environment(settings); @@ -86,7 +85,7 @@ public class NativeScriptTests extends ESTestCase { Map nativeScriptFactoryMap = new HashMap<>(); nativeScriptFactoryMap.put("my", new MyNativeScriptFactory()); Set scriptEngineServices = singleton(new NativeScriptEngineService(settings, nativeScriptFactoryMap)); - ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(NativeScriptEngineService.class, NativeScriptEngineService.NAME, ScriptMode.ON))); + ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(NativeScriptEngineService.class, NativeScriptEngineService.NAME, true))); ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(new ArrayList<>()); ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry); ScriptService scriptService = new ScriptService(settings, environment, scriptEngineServices, resourceWatcherService, scriptEngineRegistry, scriptContextRegistry, scriptSettings); diff --git a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java index 8405366f34d..715694fe890 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.script; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; @@ -46,7 +45,7 @@ public class ScriptContextTests extends ESTestCase { .build(); Set engines = new HashSet<>(Collections.singletonList(new MockScriptEngine())); ScriptEngineRegistry.ScriptEngineRegistration registration = - new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, MockScriptEngine.NAME, ScriptMode.ON); + new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, MockScriptEngine.NAME, true); ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(registration)); List customContexts = Arrays.asList( new ScriptContext.Plugin(PLUGIN_NAME, "custom_op"), diff --git a/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java b/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java index 800c079b889..8f02bee234d 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptModesTests.java @@ -83,7 +83,7 @@ public class ScriptModesTests extends ESTestCase { @After public void assertNativeScriptsAreAlwaysAllowed() { if (assertScriptModesNonNull) { - assertThat(scriptModes.getScriptMode(NativeScriptEngineService.NAME, randomFrom(ScriptType.values()), randomFrom(scriptContexts)), equalTo(ScriptMode.ON)); + 
assertThat(scriptModes.getScriptEnabled(NativeScriptEngineService.NAME, randomFrom(ScriptType.values()), randomFrom(scriptContexts)), equalTo(true)); } } @@ -93,7 +93,7 @@ public class ScriptModesTests extends ESTestCase { assertThat(scriptModes, notNullValue()); int numberOfSettings = ScriptType.values().length * scriptContextRegistry.scriptContexts().size(); numberOfSettings += 3; // for top-level inline/store/file settings - assertThat(scriptModes.scriptModes.size(), equalTo(numberOfSettings)); + assertThat(scriptModes.scriptEnabled.size(), equalTo(numberOfSettings)); if (assertAllSettingsWereChecked) { assertThat(checkedSettings.size(), equalTo(numberOfSettings)); } @@ -102,15 +102,15 @@ public class ScriptModesTests extends ESTestCase { public void testDefaultSettings() { this.scriptModes = new ScriptModes(scriptSettings, Settings.EMPTY); - assertScriptModesAllOps(ScriptMode.ON, ScriptType.FILE); - assertScriptModesAllOps(ScriptMode.OFF, ScriptType.STORED, ScriptType.INLINE); + assertScriptModesAllOps(true, ScriptType.FILE); + assertScriptModesAllOps(false, ScriptType.STORED, ScriptType.INLINE); } public void testMissingSetting() { assertAllSettingsWereChecked = false; this.scriptModes = new ScriptModes(scriptSettings, Settings.EMPTY); try { - scriptModes.getScriptMode("non_existing", randomFrom(ScriptType.values()), randomFrom(scriptContexts)); + scriptModes.getScriptEnabled("non_existing", randomFrom(ScriptType.values()), randomFrom(scriptContexts)); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("not found for lang [non_existing]")); @@ -120,13 +120,13 @@ public class ScriptModesTests extends ESTestCase { public void testScriptTypeGenericSettings() { int randomInt = randomIntBetween(1, ScriptType.values().length - 1); Set randomScriptTypesSet = new HashSet<>(); - ScriptMode[] randomScriptModes = new ScriptMode[randomInt]; + boolean[] randomScriptModes = new boolean[randomInt]; for (int i = 0; i < randomInt; i++) { boolean added = false; while (added == false) { added = randomScriptTypesSet.add(randomFrom(ScriptType.values())); } - randomScriptModes[i] = randomFrom(ScriptMode.values()); + randomScriptModes[i] = randomBoolean(); } ScriptType[] randomScriptTypes = randomScriptTypesSet.toArray(new ScriptType[randomScriptTypesSet.size()]); Settings.Builder builder = Settings.builder(); @@ -139,26 +139,26 @@ public class ScriptModesTests extends ESTestCase { assertScriptModesAllOps(randomScriptModes[i], randomScriptTypes[i]); } if (randomScriptTypesSet.contains(ScriptType.FILE) == false) { - assertScriptModesAllOps(ScriptMode.ON, ScriptType.FILE); + assertScriptModesAllOps(true, ScriptType.FILE); } if (randomScriptTypesSet.contains(ScriptType.STORED) == false) { - assertScriptModesAllOps(ScriptMode.OFF, ScriptType.STORED); + assertScriptModesAllOps(false, ScriptType.STORED); } if (randomScriptTypesSet.contains(ScriptType.INLINE) == false) { - assertScriptModesAllOps(ScriptMode.OFF, ScriptType.INLINE); + assertScriptModesAllOps(false, ScriptType.INLINE); } } public void testScriptContextGenericSettings() { int randomInt = randomIntBetween(1, scriptContexts.length - 1); Set randomScriptContextsSet = new HashSet<>(); - ScriptMode[] randomScriptModes = new ScriptMode[randomInt]; + boolean[] randomScriptModes = new boolean[randomInt]; for (int i = 0; i < randomInt; i++) { boolean added = false; while (added == false) { added = randomScriptContextsSet.add(randomFrom(scriptContexts)); } - randomScriptModes[i] = 
randomFrom(ScriptMode.values()); + randomScriptModes[i] = randomBoolean(); } ScriptContext[] randomScriptContexts = randomScriptContextsSet.toArray(new ScriptContext[randomScriptContextsSet.size()]); Settings.Builder builder = Settings.builder(); @@ -172,8 +172,8 @@ public class ScriptModesTests extends ESTestCase { } ScriptContext[] complementOf = complementOf(randomScriptContexts); - assertScriptModes(ScriptMode.ON, new ScriptType[]{ScriptType.FILE}, complementOf); - assertScriptModes(ScriptMode.OFF, new ScriptType[]{ScriptType.STORED, ScriptType.INLINE}, complementOf); + assertScriptModes(true, new ScriptType[]{ScriptType.FILE}, complementOf); + assertScriptModes(false, new ScriptType[]{ScriptType.STORED, ScriptType.INLINE}, complementOf); } public void testConflictingScriptTypeAndOpGenericSettings() { @@ -184,28 +184,28 @@ public class ScriptModesTests extends ESTestCase { .put("script.inline", "true"); //operations generic settings have precedence over script type generic settings this.scriptModes = new ScriptModes(scriptSettings, builder.build()); - assertScriptModesAllTypes(ScriptMode.OFF, scriptContext); + assertScriptModesAllTypes(false, scriptContext); ScriptContext[] complementOf = complementOf(scriptContext); - assertScriptModes(ScriptMode.ON, new ScriptType[]{ScriptType.FILE, ScriptType.STORED}, complementOf); - assertScriptModes(ScriptMode.ON, new ScriptType[]{ScriptType.INLINE}, complementOf); + assertScriptModes(true, new ScriptType[]{ScriptType.FILE, ScriptType.STORED}, complementOf); + assertScriptModes(true, new ScriptType[]{ScriptType.INLINE}, complementOf); } - private void assertScriptModesAllOps(ScriptMode expectedScriptMode, ScriptType... scriptTypes) { - assertScriptModes(expectedScriptMode, scriptTypes, scriptContexts); + private void assertScriptModesAllOps(boolean expectedScriptEnabled, ScriptType... scriptTypes) { + assertScriptModes(expectedScriptEnabled, scriptTypes, scriptContexts); } - private void assertScriptModesAllTypes(ScriptMode expectedScriptMode, ScriptContext... scriptContexts) { - assertScriptModes(expectedScriptMode, ScriptType.values(), scriptContexts); + private void assertScriptModesAllTypes(boolean expectedScriptEnabled, ScriptContext... scriptContexts) { + assertScriptModes(expectedScriptEnabled, ScriptType.values(), scriptContexts); } - private void assertScriptModes(ScriptMode expectedScriptMode, ScriptType[] scriptTypes, ScriptContext... scriptContexts) { + private void assertScriptModes(boolean expectedScriptEnabled, ScriptType[] scriptTypes, ScriptContext... scriptContexts) { assert scriptTypes.length > 0; assert scriptContexts.length > 0; for (ScriptType scriptType : scriptTypes) { checkedSettings.add("script.engine.custom." + scriptType); for (ScriptContext scriptContext : scriptContexts) { assertThat("custom." + scriptType + "." + scriptContext.getKey() + " doesn't have the expected value", - scriptModes.getScriptMode("custom", scriptType, scriptContext), equalTo(expectedScriptMode)); + scriptModes.getScriptEnabled("custom", scriptType, scriptContext), equalTo(expectedScriptEnabled)); checkedSettings.add("custom." + scriptType + "." 
+ scriptContext); } } diff --git a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index 8fd8f674c3a..890ffccc514 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -71,12 +71,12 @@ public class ScriptServiceTests extends ESTestCase { private Path scriptsFilePath; private Settings baseSettings; - private static final Map DEFAULT_SCRIPT_MODES = new HashMap<>(); + private static final Map DEFAULT_SCRIPT_ENABLED = new HashMap<>(); static { - DEFAULT_SCRIPT_MODES.put(ScriptType.FILE, ScriptMode.ON); - DEFAULT_SCRIPT_MODES.put(ScriptType.STORED, ScriptMode.OFF); - DEFAULT_SCRIPT_MODES.put(ScriptType.INLINE, ScriptMode.OFF); + DEFAULT_SCRIPT_ENABLED.put(ScriptType.FILE, true); + DEFAULT_SCRIPT_ENABLED.put(ScriptType.STORED, false); + DEFAULT_SCRIPT_ENABLED.put(ScriptType.INLINE, false); } @Before @@ -110,7 +110,7 @@ public class ScriptServiceTests extends ESTestCase { contexts.put(context, new ScriptContext.Plugin(plugin, operation)); } List registries = new ArrayList<>(2); - registries.add(new ScriptEngineRegistry.ScriptEngineRegistration(TestEngineService.class, TestEngineService.NAME, ScriptMode.ON)); + registries.add(new ScriptEngineRegistry.ScriptEngineRegistration(TestEngineService.class, TestEngineService.NAME, true)); registries.add(new ScriptEngineRegistry.ScriptEngineRegistration(TestDangerousEngineService.class, TestDangerousEngineService.NAME)); scriptEngineRegistry = new ScriptEngineRegistry(registries); scriptContextRegistry = new ScriptContextRegistry(contexts.values()); @@ -215,25 +215,25 @@ public class ScriptServiceTests extends ESTestCase { public void testFineGrainedSettings() throws IOException { //collect the fine-grained settings to set for this run int numScriptSettings = randomIntBetween(0, ScriptType.values().length); - Map scriptSourceSettings = new HashMap<>(); + Map scriptSourceSettings = new HashMap<>(); for (int i = 0; i < numScriptSettings; i++) { ScriptType scriptType; do { scriptType = randomFrom(ScriptType.values()); } while (scriptSourceSettings.containsKey(scriptType)); - scriptSourceSettings.put(scriptType, randomFrom(ScriptMode.values())); + scriptSourceSettings.put(scriptType, randomBoolean()); } int numScriptContextSettings = randomIntBetween(0, this.scriptContextRegistry.scriptContexts().size()); - Map scriptContextSettings = new HashMap<>(); + Map scriptContextSettings = new HashMap<>(); for (int i = 0; i < numScriptContextSettings; i++) { ScriptContext scriptContext; do { scriptContext = randomFrom(this.scriptContexts); } while (scriptContextSettings.containsKey(scriptContext)); - scriptContextSettings.put(scriptContext, randomFrom(ScriptMode.values())); + scriptContextSettings.put(scriptContext, randomBoolean()); } int numEngineSettings = randomIntBetween(0, ScriptType.values().length * scriptContexts.length); - Map engineSettings = new HashMap<>(); + Map engineSettings = new HashMap<>(); for (int i = 0; i < numEngineSettings; i++) { String settingKey; do { @@ -241,43 +241,34 @@ public class ScriptServiceTests extends ESTestCase { ScriptContext scriptContext = randomFrom(this.scriptContexts); settingKey = scriptEngineService.getType() + "." + scriptType + "." 
+ scriptContext.getKey(); } while (engineSettings.containsKey(settingKey)); - engineSettings.put(settingKey, randomFrom(ScriptMode.values())); + engineSettings.put(settingKey, randomBoolean()); } //set the selected fine-grained settings Settings.Builder builder = Settings.builder(); - for (Map.Entry entry : scriptSourceSettings.entrySet()) { - switch (entry.getValue()) { - case ON: - builder.put("script" + "." + entry.getKey().getScriptType(), "true"); - break; - case OFF: - builder.put("script" + "." + entry.getKey().getScriptType(), "false"); - break; + for (Map.Entry entry : scriptSourceSettings.entrySet()) { + if (entry.getValue()) { + builder.put("script" + "." + entry.getKey().getScriptType(), "true"); + } else { + builder.put("script" + "." + entry.getKey().getScriptType(), "false"); } } - for (Map.Entry entry : scriptContextSettings.entrySet()) { - switch (entry.getValue()) { - case ON: - builder.put("script" + "." + entry.getKey().getKey(), "true"); - break; - case OFF: - builder.put("script" + "." + entry.getKey().getKey(), "false"); - break; + for (Map.Entry entry : scriptContextSettings.entrySet()) { + if (entry.getValue()) { + builder.put("script" + "." + entry.getKey().getKey(), "true"); + } else { + builder.put("script" + "." + entry.getKey().getKey(), "false"); } } - for (Map.Entry entry : engineSettings.entrySet()) { + for (Map.Entry entry : engineSettings.entrySet()) { int delimiter = entry.getKey().indexOf('.'); String part1 = entry.getKey().substring(0, delimiter); String part2 = entry.getKey().substring(delimiter + 1); String lang = randomFrom(scriptEnginesByLangMap.get(part1).getType()); - switch (entry.getValue()) { - case ON: - builder.put("script.engine" + "." + lang + "." + part2, "true"); - break; - case OFF: - builder.put("script.engine" + "." + lang + "." + part2, "false"); - break; + if (entry.getValue()) { + builder.put("script.engine" + "." + lang + "." + part2, "true"); + } else { + builder.put("script.engine" + "." + lang + "." + part2, "false"); } } @@ -290,25 +281,22 @@ public class ScriptServiceTests extends ESTestCase { String script = scriptType == ScriptType.FILE ? "file_script" : "script"; for (ScriptContext scriptContext : this.scriptContexts) { //fallback mechanism: 1) engine specific settings 2) op based settings 3) source based settings - ScriptMode scriptMode = engineSettings.get(dangerousScriptEngineService.getType() + "." + scriptType + "." + scriptContext.getKey()); - if (scriptMode == null) { - scriptMode = scriptContextSettings.get(scriptContext); + Boolean scriptEnabled = engineSettings.get(dangerousScriptEngineService.getType() + "." + scriptType + "." 
+ scriptContext.getKey()); + if (scriptEnabled == null) { + scriptEnabled = scriptContextSettings.get(scriptContext); } - if (scriptMode == null) { - scriptMode = scriptSourceSettings.get(scriptType); + if (scriptEnabled == null) { + scriptEnabled = scriptSourceSettings.get(scriptType); } - if (scriptMode == null) { - scriptMode = DEFAULT_SCRIPT_MODES.get(scriptType); + if (scriptEnabled == null) { + scriptEnabled = DEFAULT_SCRIPT_ENABLED.get(scriptType); } String lang = dangerousScriptEngineService.getType(); - switch (scriptMode) { - case ON: - assertCompileAccepted(lang, script, scriptType, scriptContext); - break; - case OFF: - assertCompileRejected(lang, script, scriptType, scriptContext); - break; + if (scriptEnabled) { + assertCompileAccepted(lang, script, scriptType, scriptContext); + } else { + assertCompileRejected(lang, script, scriptType, scriptContext); } } } diff --git a/core/src/test/java/org/elasticsearch/script/ScriptSettingsTests.java b/core/src/test/java/org/elasticsearch/script/ScriptSettingsTests.java index 3d82e2f1468..92598ec8dd2 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptSettingsTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.script; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.test.ESTestCase; @@ -39,7 +38,7 @@ public class ScriptSettingsTests extends ESTestCase { public void testDefaultLanguageIsGroovy() { ScriptEngineRegistry scriptEngineRegistry = - new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.NAME, ScriptMode.ON))); + new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.NAME, true))); ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList()); ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry); assertThat(scriptSettings.getDefaultScriptLanguageSetting().get(Settings.EMPTY), equalTo("groovy")); @@ -47,7 +46,7 @@ public class ScriptSettingsTests extends ESTestCase { public void testCustomDefaultLanguage() { ScriptEngineRegistry scriptEngineRegistry = - new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.NAME, ScriptMode.ON))); + new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.NAME, true))); ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList()); ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry); String defaultLanguage = CustomScriptEngineService.NAME; @@ -57,7 +56,7 @@ public class ScriptSettingsTests extends ESTestCase { public void testInvalidDefaultLanguage() { ScriptEngineRegistry scriptEngineRegistry = - new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.NAME, ScriptMode.ON))); + new ScriptEngineRegistry(Collections.singletonList(new 
ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.NAME, true))); ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList()); ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry); Settings settings = Settings.builder().put("script.default_lang", "C++").build(); diff --git a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java index 0c56234e48d..384e181c899 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java +++ b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java @@ -143,7 +143,6 @@ public class SearchModuleTests extends ModuleTestCase { "multi_match", "nested", "parent_id", - "percolate", "prefix", "query_string", "range", diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java index 2949dadcc49..7d21fbaf2a1 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java @@ -40,7 +40,7 @@ import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.env.Environment; import org.elasticsearch.env.EnvironmentModule; import org.elasticsearch.index.Index; -import org.elasticsearch.index.query.AbstractQueryTestCase; +import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -51,7 +51,6 @@ import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptContextRegistry; import org.elasticsearch.script.ScriptEngineRegistry; import org.elasticsearch.script.ScriptEngineService; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptSettings; @@ -75,8 +74,8 @@ import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.containsString; public class AggregatorParsingTests extends ESTestCase { @@ -135,7 +134,7 @@ public class AggregatorParsingTests extends ESTestCase { new ScriptEngineRegistry(Collections .singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, MockScriptEngine.NAME, - ScriptMode.ON))); + true))); bind(ScriptEngineRegistry.class).toInstance(scriptEngineRegistry); ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(customContexts); bind(ScriptContextRegistry.class).toInstance(scriptContextRegistry); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java index e986ab1288f..e7933fb1d0c 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java +++ 
b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java @@ -44,7 +44,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import org.elasticsearch.env.EnvironmentModule; import org.elasticsearch.index.Index; -import org.elasticsearch.index.query.AbstractQueryTestCase; +import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -55,7 +55,6 @@ import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptContextRegistry; import org.elasticsearch.script.ScriptEngineRegistry; import org.elasticsearch.script.ScriptEngineService; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptSettings; @@ -76,8 +75,8 @@ import java.util.HashSet; import java.util.List; import java.util.Set; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; public abstract class BaseAggregationTestCase> extends ESTestCase { @@ -149,7 +148,7 @@ public abstract class BaseAggregationTestCase> new ScriptEngineRegistry(Collections .singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, MockScriptEngine.NAME, - ScriptMode.ON))); + true))); bind(ScriptEngineRegistry.class).toInstance(scriptEngineRegistry); ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(customContexts); bind(ScriptContextRegistry.class).toInstance(scriptContextRegistry); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java index 113b52cb0b6..8fdc3de325d 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java @@ -44,7 +44,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import org.elasticsearch.env.EnvironmentModule; import org.elasticsearch.index.Index; -import org.elasticsearch.index.query.AbstractQueryTestCase; +import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -55,7 +55,6 @@ import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptContextRegistry; import org.elasticsearch.script.ScriptEngineRegistry; import org.elasticsearch.script.ScriptEngineService; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptSettings; @@ -77,8 +76,8 @@ import java.util.HashSet; import java.util.List; import java.util.Set; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; -import static 
org.elasticsearch.cluster.service.ClusterServiceUtils.setState; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; +import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; public abstract class BasePipelineAggregationTestCase extends ESTestCase { @@ -148,7 +147,7 @@ public abstract class BasePipelineAggregationTestCase { - private final static String[] timeZoneIds = DateTimeZone.getAvailableIDs().toArray(new String[DateTimeZone.getAvailableIDs().size()]); - @Override protected DateRangeAggregationBuilder createTestAggregatorBuilder() { int numRanges = randomIntBetween(1, 10); @@ -60,7 +57,7 @@ public class DateRangeTests extends BaseAggregationTestCase allNodeIds = new ArrayList<>(); + ArrayList allNodeNames = new ArrayList<>(); + ArrayList allNodeHosts = new ArrayList<>(); + NodesStatsResponse nodeStats = client().admin().cluster().prepareNodesStats().execute().actionGet(); + for (NodeStats node : nodeStats.getNodes()) { + allNodeIds.add(node.getNode().getId()); + allNodeNames.add(node.getNode().getName()); + allNodeHosts.add(node.getHostname()); + } + + String node_expr = "_only_nodes:" + Strings.arrayToCommaDelimitedString(allNodeIds.toArray()); + request = client.prepareSearch("test").setQuery(matchAllQuery()).setPreference(node_expr); + assertSearchOnRandomNodes(request); + + node_expr = "_only_nodes:" + Strings.arrayToCommaDelimitedString(allNodeNames.toArray()); + request = client.prepareSearch("test").setQuery(matchAllQuery()).setPreference(node_expr); + assertSearchOnRandomNodes(request); + + node_expr = "_only_nodes:" + Strings.arrayToCommaDelimitedString(allNodeHosts.toArray()); + request = client.prepareSearch("test").setQuery(matchAllQuery()).setPreference(node_expr); + assertSearchOnRandomNodes(request); + + // Mix of valid and invalid nodes + node_expr = "_only_nodes:*,invalidnode"; + request = client.prepareSearch("test").setQuery(matchAllQuery()).setPreference(node_expr); + assertSearchOnRandomNodes(request); + } + + private void assertSearchOnRandomNodes(SearchRequestBuilder request) { + Set hitNodes = new HashSet<>(); + for (int i = 0; i < 2; i++) { + SearchResponse searchResponse = request.execute().actionGet(); + assertThat(searchResponse.getHits().getHits().length, greaterThan(0)); + hitNodes.add(searchResponse.getHits().getAt(0).shard().nodeId()); + } + assertThat(hitNodes.size(), greaterThan(1)); + } } diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index 9dd32e091cd..882226afe63 100644 --- a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -1436,7 +1436,7 @@ public class SearchQueryIT extends ESIntegTestCase { searchResponse = client().prepareSearch("test").setQuery( spanNearQuery(spanTermQuery("description", "foo"), 3) - .clause(spanTermQuery("description", "other"))).get(); + .addClause(spanTermQuery("description", "other"))).get(); assertHitCount(searchResponse, 3L); } @@ -1481,17 +1481,17 @@ public class SearchQueryIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch("test")
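// span_not: keep 'quick ... fox' matches whose span does not overlap a 'brown' span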
.setQuery(spanNotQuery(spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1) - .clause(QueryBuilders.spanTermQuery("description", "fox")), spanTermQuery("description", "brown"))).get(); + .addClause(QueryBuilders.spanTermQuery("description", "fox")), spanTermQuery("description", "brown"))).get(); assertHitCount(searchResponse, 1L); searchResponse = client().prepareSearch("test") .setQuery(spanNotQuery(spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1) - .clause(QueryBuilders.spanTermQuery("description", "fox")), spanTermQuery("description", "sleeping")).dist(5)).get(); + .addClause(QueryBuilders.spanTermQuery("description", "fox")), spanTermQuery("description", "sleeping")).dist(5)).get(); assertHitCount(searchResponse, 1L); searchResponse = client().prepareSearch("test") .setQuery(spanNotQuery(spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1) - .clause(QueryBuilders.spanTermQuery("description", "fox")), spanTermQuery("description", "jumped")).pre(1).post(1)).get(); + .addClause(QueryBuilders.spanTermQuery("description", "fox")), spanTermQuery("description", "jumped")).pre(1).post(1)).get(); assertHitCount(searchResponse, 1L); } diff --git a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java index d0f2c0492da..5cc59046433 100644 --- a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java @@ -158,7 +158,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAsciiOfLengthBetween(1, 10), indexSettings); // shard context will only need indicesQueriesRegistry for building Query objects nested in query rescorer QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, null, null, indicesQueriesRegistry, - null, null, null, null) { + null, null, null) { @Override public MappedFieldType fieldMapper(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name); diff --git a/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java b/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java index ec9e72639d2..9ee9e0841cf 100644 --- a/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java +++ b/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java @@ -31,7 +31,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.UncategorizedExecutionException; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -318,7 +317,7 @@ public class SearchScrollIT extends ESIntegTestCase { public void testClearNonExistentScrollId() throws Exception { createIndex("idx"); ClearScrollResponse response = client().prepareClearScroll() - .addScrollId("cXVlcnlUaGVuRmV0Y2g7MzsyOlpBRC1qOUhrUjhhZ0NtQWUxU2FuWlE7MjpRcjRaNEJ2R1JZV1VEMW02ZGF1LW5ROzI6S0xUal9lZDRTd3lWNUhUU2VSb01CQTswOw==") + .addScrollId("DnF1ZXJ5VGhlbkZldGNoAwAAAAAAAAABFnRtLWMyRzBqUUQyNk1uM0xDTjJ4S0EAAAAAAAAAARYzNkhxbWFTYVFVNmgxTGQyYUZVYV9nAAAAAAAAAAEWdVcxNWZmRGZSVFN2V0xMUGF2NGx1Zw==") 
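// a scroll id that is well-formed in the current format but matches no live search context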
.get(); // Whether we actually clear a scroll, we can't know, since that information isn't serialized in the // free search context response, which is returned from each node we want to clear a particular scroll. @@ -330,24 +329,19 @@ public class SearchScrollIT extends ESIntegTestCase { public void testClearIllegalScrollId() throws Exception { createIndex("idx"); - try { - client().prepareClearScroll().addScrollId("c2Nhbjs2OzM0NDg1ODpzRlBLc0FXNlNyNm5JWUc1").get(); - fail(); - } catch (IllegalArgumentException e) { - } - try { - // Fails during base64 decoding (Base64-encoded string must have at least four characters) - client().prepareClearScroll().addScrollId("a").get(); - fail(); - } catch (IllegalArgumentException e) { - } - try { - client().prepareClearScroll().addScrollId("abcabc").get(); - fail(); - // if running without -ea this will also throw ElasticsearchIllegalArgumentException - } catch (UncategorizedExecutionException e) { - assertThat(e.getRootCause(), instanceOf(AssertionError.class)); - } + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> client().prepareClearScroll().addScrollId("c2Nhbjs2OzM0NDg1ODpzRlBLc0FXNlNyNm5JWUc1").get()); + assertEquals("Cannot parse scroll id", e.getMessage()); + + e = expectThrows(IllegalArgumentException.class, + // Fails during base64 decoding (Base64-encoded string must have at least four characters) + () -> client().prepareClearScroll().addScrollId("a").get()); + assertEquals("Cannot parse scroll id", e.getMessage()); + + e = expectThrows(IllegalArgumentException.class, + // Other invalid base64 + () -> client().prepareClearScroll().addScrollId("abcabc").get()); + assertEquals("Cannot parse scroll id", e.getMessage()); } public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index 21b71508bbb..e964b975bb2 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -232,7 +232,7 @@ public abstract class AbstractSortTestCase> extends EST } }); return new QueryShardContext(idxSettings, bitsetFilterCache, ifds, null, null, scriptService, - indicesQueriesRegistry, null, null, null, null) { + indicesQueriesRegistry, null, null, null) { @Override public MappedFieldType fieldMapper(String name) { return provideMappedFieldType(name); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearch2xIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearch2xIT.java index d09c8f172df..812e43918d9 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearch2xIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearch2xIT.java @@ -29,10 +29,8 @@ import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.ShardSegments; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.percolate.PercolateResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.metadata.IndexMetaData; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; @@ -65,7 +63,6 @@ import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; import static org.hamcrest.Matchers.containsString; @@ -114,39 +111,6 @@ public class CompletionSuggestSearch2xIT extends ESIntegTestCase { assertSuggestionsNotInOrder("t", "The Prodigy", "Turbonegro", "Turbonegro Get it on", "The Prodigy Firestarter"); } - public void testSuggestFieldWithPercolateApi() throws Exception { - createIndexAndMapping(completionMappingBuilder); - String[][] inputs = {{"Foo Fighters"}, {"Foo Fighters"}, {"Foo Fighters"}, {"Foo Fighters"}, - {"Generator", "Foo Fighters Generator"}, {"Learn to Fly", "Foo Fighters Learn to Fly"}, - {"The Prodigy"}, {"The Prodigy"}, {"The Prodigy"}, {"Firestarter", "The Prodigy Firestarter"}, - {"Turbonegro"}, {"Turbonegro"}, {"Get it on", "Turbonegro Get it on"}}; // work with frequencies - for (int i = 0; i < inputs.length; i++) { - XContentBuilder source = jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input"); - for (String input : inputs[i]) { - source.value(input); - } - source.endArray() - .endObject() - .endObject(); - client().prepareIndex(INDEX, TYPE, "" + i) - .setSource(source).execute().actionGet(); - } - - client().prepareIndex(INDEX, ".percolator", "4") - .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) - .execute().actionGet(); - - refresh(); - - PercolateResponse response = client().preparePercolate().setIndices(INDEX).setDocumentType(TYPE) - .setGetRequest(Requests.getRequest(INDEX).type(TYPE).id("1")) - .execute().actionGet(); - assertThat(response.getCount(), equalTo(1L)); - - } - public void testBasicPrefixSuggestion() throws Exception { completionMappingBuilder.payloads(true); createIndexAndMapping(completionMappingBuilder); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index a625fae7b4e..9ef3c898cab 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -31,17 +31,14 @@ import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.ShardSegments; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.percolate.PercolateResponse; import org.elasticsearch.action.search.ReduceSearchPhaseException; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.percolator.PercolatorFieldMapper; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; @@ -71,7 +68,6 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.util.CollectionUtils.iterableAsArrayList; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; @@ -394,39 +390,6 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { } } - public void testSuggestFieldWithPercolateApi() throws Exception { - createIndexAndMapping(completionMappingBuilder); - String[][] inputs = {{"Foo Fighters"}, {"Foo Fighters"}, {"Foo Fighters"}, {"Foo Fighters"}, - {"Generator", "Foo Fighters Generator"}, {"Learn to Fly", "Foo Fighters Learn to Fly"}, - {"The Prodigy"}, {"The Prodigy"}, {"The Prodigy"}, {"Firestarter", "The Prodigy Firestarter"}, - {"Turbonegro"}, {"Turbonegro"}, {"Get it on", "Turbonegro Get it on"}}; // work with frequencies - for (int i = 0; i < inputs.length; i++) { - XContentBuilder source = jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input"); - for (String input : inputs[i]) { - source.value(input); - } - source.endArray() - .endObject() - .endObject(); - client().prepareIndex(INDEX, TYPE, "" + i) - .setSource(source).execute().actionGet(); - } - client().admin().indices().preparePutMapping(INDEX).setType("query").setSource("query", "type=percolator").get(); - - client().prepareIndex(INDEX, "query", "4") - .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) - .execute().actionGet(); - - refresh(); - - PercolateResponse response = client().preparePercolate().setIndices(INDEX).setDocumentType(TYPE) - .setGetRequest(Requests.getRequest(INDEX).type(TYPE).id("1")) - .execute().actionGet(); - assertThat(response.getCount(), equalTo(1L)); - } - public void testThatWeightsAreWorking() throws Exception { createIndexAndMapping(completionMappingBuilder); diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 5ed63b519a4..7ca30132a4a 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -640,7 +640,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas assertAcked(client.admin().cluster().preparePutRepository("test-repo") .setType("fs").setSettings(Settings.builder().put("location", repositoryLocation))); - createIndex("test-idx"); + prepareCreate("test-idx").setSettings(Settings.builder().put("index.allocation.max_retries", Integer.MAX_VALUE)).get(); ensureGreen(); logger.info("--> indexing some data"); diff --git 
a/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java b/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java index cbd9bc0e96f..7df44738076 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java +++ b/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java @@ -231,7 +231,7 @@ public class MockRepository extends FsRepository { private boolean shouldFail(String blobName, double probability) { if (probability > 0.0) { - String path = path().add(blobName).buildAsString("/") + "/" + randomPrefix; + String path = path().add(blobName).buildAsString() + randomPrefix; path += "/" + incrementAndGet(path); logger.info("checking [{}] [{}]", path, Math.abs(hashCode(path)) < Integer.MAX_VALUE * probability); return Math.abs(hashCode(path)) < Integer.MAX_VALUE * probability; diff --git a/core/src/test/java/org/elasticsearch/threadpool/ScalingThreadPoolTests.java b/core/src/test/java/org/elasticsearch/threadpool/ScalingThreadPoolTests.java index 94d6d075589..2212f162eb6 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/ScalingThreadPoolTests.java +++ b/core/src/test/java/org/elasticsearch/threadpool/ScalingThreadPoolTests.java @@ -59,7 +59,7 @@ public class ScalingThreadPoolTests extends ESThreadPoolTestCase { final int expectedSize; if (sizeBasedOnNumberOfProcessors < min || randomBoolean()) { - expectedSize = randomIntBetween(min, 16); + expectedSize = randomIntBetween(Math.max(1, min), 16); builder.put("threadpool." + threadPoolName + ".size", expectedSize); } else { expectedSize = sizeBasedOnNumberOfProcessors; @@ -177,7 +177,8 @@ public class ScalingThreadPoolTests extends ESThreadPoolTestCase { } }); } - assertThat(stats(threadPool, threadPoolName).getThreads(), equalTo(128)); + int threads = stats(threadPool, threadPoolName).getThreads(); + assertEquals(128, threads); latch.countDown(); // this while loop is the core of this test; if threads // are correctly idled down by the pool, the number of diff --git a/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java b/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java index 5c3f0999ea2..eb13de34858 100644 --- a/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java +++ b/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java @@ -28,7 +28,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.percolate.PercolateResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.ActionFilter; @@ -217,26 +216,6 @@ public class ContextAndHeaderTransportIT extends ESIntegTestCase { assertRequestsContainHeader(MultiTermVectorsRequest.class); } - public void testThatPercolatingExistingDocumentGetRequestContainsContextAndHeaders() throws Exception { - Client client = transportClient(); - client.admin().indices().preparePutMapping(lookupIndex).setType("query").setSource("query", "type=percolator").get(); - client.prepareIndex(lookupIndex, "query", "1") - .setSource(jsonBuilder().startObject() - .startObject("query").startObject("match").field("name", "star wars").endObject().endObject() - .endObject()) - .get(); - client.prepareIndex(lookupIndex, "type", "1") 
- .setSource(jsonBuilder().startObject().field("name", "Star Wars - The new republic").endObject()) - .get(); - client.admin().indices().prepareRefresh(lookupIndex).get(); - - GetRequest getRequest = client.prepareGet(lookupIndex, "type", "1").request(); - PercolateResponse response = client.preparePercolate().setDocumentType("type").setGetRequest(getRequest).get(); - assertThat(response.getCount(), is(1L)); - - assertGetRequestsContainHeaders(); - } - public void testThatRelevantHttpHeadersBecomeRequestHeaders() throws Exception { String releventHeaderName = "relevant_" + randomHeaderKey; for (RestController restController : internalCluster().getDataNodeInstances(RestController.class)) { diff --git a/core/src/test/java/org/elasticsearch/update/UpdateIT.java b/core/src/test/java/org/elasticsearch/update/UpdateIT.java index 65553a4a90e..0445da61096 100644 --- a/core/src/test/java/org/elasticsearch/update/UpdateIT.java +++ b/core/src/test/java/org/elasticsearch/update/UpdateIT.java @@ -43,7 +43,6 @@ import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptEngineRegistry; import org.elasticsearch.script.ScriptEngineService; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.SearchScript; @@ -94,7 +93,7 @@ public class UpdateIT extends ESIntegTestCase { } public void onModule(ScriptModule module) { - module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(PutFieldValuesScriptEngine.class, PutFieldValuesScriptEngine.NAME, ScriptMode.ON)); + module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(PutFieldValuesScriptEngine.class, PutFieldValuesScriptEngine.NAME, true)); } } @@ -181,7 +180,7 @@ public class UpdateIT extends ESIntegTestCase { } public void onModule(ScriptModule module) { - module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldIncrementScriptEngine.class, FieldIncrementScriptEngine.NAME, ScriptMode.ON)); + module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldIncrementScriptEngine.class, FieldIncrementScriptEngine.NAME, true)); } } @@ -261,7 +260,7 @@ public class UpdateIT extends ESIntegTestCase { } public void onModule(ScriptModule module) { - module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ScriptedUpsertScriptEngine.class, ScriptedUpsertScriptEngine.NAME, ScriptMode.ON)); + module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ScriptedUpsertScriptEngine.class, ScriptedUpsertScriptEngine.NAME, true)); } } @@ -341,7 +340,7 @@ public class UpdateIT extends ESIntegTestCase { } public void onModule(ScriptModule module) { - module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExtractContextInSourceScriptEngine.class, ExtractContextInSourceScriptEngine.NAME, ScriptMode.ON)); + module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExtractContextInSourceScriptEngine.class, ExtractContextInSourceScriptEngine.NAME, true)); } } diff --git a/core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml b/core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml index 515e4320fd2..548b186e46f 100644 --- a/core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml +++ b/core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml @@ -1,6 +1,7 @@ -# you can override this using by 
setting a system property, for example -Ees.logger.level=DEBUG -es.logger.level: INFO -rootLogger: ${es.logger.level}, console +# you can override using a command-line parameter +# -E logger.level=(ERROR|WARN|INFO|DEBUG|TRACE) +logger.level: INFO +rootLogger: ${logger.level}, console logger: test: TRACE, console diff --git a/dev-tools/create_bwc_index.py b/dev-tools/create_bwc_index.py index 361934908ec..f73bc58554b 100644 --- a/dev-tools/create_bwc_index.py +++ b/dev-tools/create_bwc_index.py @@ -137,13 +137,13 @@ def start_node(version, release_dir, data_dir, repo_dir, tcp_port=DEFAULT_TRANSP cmd = [ os.path.join(release_dir, 'bin/elasticsearch'), - '-Des.path.data=%s' % data_dir, - '-Des.path.logs=logs', - '-Des.cluster.name=%s' % cluster_name, - '-Des.network.host=localhost', - '-Des.transport.tcp.port=%s' % tcp_port, - '-Des.http.port=%s' % http_port, - '-Des.path.repo=%s' % repo_dir + '-Epath.data=%s' % data_dir, + '-Epath.logs=logs', + '-Ecluster.name=%s' % cluster_name, + '-Enetwork.host=localhost', + '-Etransport.tcp.port=%s' % tcp_port, + '-Ehttp.port=%s' % http_port, + '-Epath.repo=%s' % repo_dir ] if version.startswith('0.') or version.startswith('1.0.0.Beta') : cmd.append('-f') # version before 1.0 start in background automatically diff --git a/dev-tools/prepare_release_candidate.py b/dev-tools/prepare_release_candidate.py index be5660c60ae..84c10f4b9e4 100644 --- a/dev-tools/prepare_release_candidate.py +++ b/dev-tools/prepare_release_candidate.py @@ -56,7 +56,7 @@ The packages may be downloaded from the following URLs: Plugins can be installed as follows: - bin/elasticsearch-plugin -Des.plugins.staging=true install cloud-aws + ES_JAVA_OPTS="-Des.plugins.staging=true" bin/elasticsearch-plugin install cloud-aws The same goes for the x-plugins: diff --git a/dev-tools/smoke_test_rc.py b/dev-tools/smoke_test_rc.py index 39db9929a54..32269e319bb 100644 --- a/dev-tools/smoke_test_rc.py +++ b/dev-tools/smoke_test_rc.py @@ -63,7 +63,6 @@ DEFAULT_PLUGINS = ["analysis-icu", "analysis-phonetic", "analysis-smartcn", "analysis-stempel", - "delete-by-query", "discovery-azure", "discovery-ec2", "discovery-gce", @@ -192,7 +191,7 @@ def smoke_test_release(release, files, expected_hash, plugins): plugin_names = {} for plugin in plugins: print(' Install plugin [%s]' % (plugin)) - run('%s; %s -Des.plugins.staging=true %s %s' % (java_exe(), es_plugin_path, 'install -b', plugin)) + run('%s; export ES_JAVA_OPTS="-Des.plugins.staging=true"; %s %s %s' % (java_exe(), es_plugin_path, 'install -b', plugin)) plugin_names[plugin] = True if 'x-pack' in plugin_names: headers = { 'Authorization' : 'Basic %s' % base64.b64encode(b"es_admin:foobar").decode("UTF-8") } @@ -203,7 +202,7 @@ def smoke_test_release(release, files, expected_hash, plugins): headers = {} print(' Starting elasticsearch deamon from [%s]' % es_dir) try: - run('%s; %s -Ees.node.name=smoke_tester -Ees.cluster.name=prepare_release -Ees.script.inline=true -Ees.script.stored=true -Ees.repositories.url.allowed_urls=http://snapshot.test* %s -Ees.pidfile=%s -Ees.node.portsfile=true' + run('%s; %s -Enode.name=smoke_tester -Ecluster.name=prepare_release -Escript.inline=true -Escript.stored=true -Erepositories.url.allowed_urls=http://snapshot.test* %s -Epidfile=%s -Enode.portsfile=true' % (java_exe(), es_run_path, '-d', os.path.join(es_dir, 'es-smoke.pid'))) if not wait_for_node_startup(es_dir, header=headers): print("elasticsearch logs:") diff --git a/distribution/build.gradle b/distribution/build.gradle index 0df2f24fef0..12bb17b807f 100644 --- 
a/distribution/build.gradle +++ b/distribution/build.gradle @@ -302,10 +302,6 @@ configure(subprojects.findAll { ['deb', 'rpm'].contains(it.name) }) { '''.stripIndent().replace('\n', ' ').trim() url 'https://www.elastic.co/' - /* The version of the package can't contain -SNAPSHOT so we rip it off if - we see it. We'll add it back on to the file name though. */ - version project.version.replace('-SNAPSHOT', '') - // signing setup if (project.hasProperty('signing.password') && System.getProperty('build.snapshot', 'true') == 'false') { signingKeyId = project.hasProperty('signing.keyId') ? project.property('signing.keyId') : 'D88E42B4' diff --git a/distribution/deb/build.gradle b/distribution/deb/build.gradle index a3d42a2c042..89d33597888 100644 --- a/distribution/deb/build.gradle +++ b/distribution/deb/build.gradle @@ -22,6 +22,8 @@ task buildDeb(type: Deb) { baseName 'elasticsearch' // this is what pom generation uses for artifactId // Follow elasticsearch's deb file naming convention archiveName "${packageName}-${project.version}.deb" + version = project.version + packageGroup 'web' requires 'libc6' requires 'adduser' diff --git a/distribution/deb/src/main/packaging/init.d/elasticsearch b/distribution/deb/src/main/packaging/init.d/elasticsearch index a01643db2b3..f04008ba796 100755 --- a/distribution/deb/src/main/packaging/init.d/elasticsearch +++ b/distribution/deb/src/main/packaging/init.d/elasticsearch @@ -79,7 +79,7 @@ fi # Define other required variables PID_FILE="$PID_DIR/$NAME.pid" DAEMON=$ES_HOME/bin/elasticsearch -DAEMON_OPTS="-d -p $PID_FILE -Ees.default.path.logs=$LOG_DIR -Ees.default.path.data=$DATA_DIR -Ees.default.path.conf=$CONF_DIR" +DAEMON_OPTS="-d -p $PID_FILE -Edefault.path.logs=$LOG_DIR -Edefault.path.data=$DATA_DIR -Edefault.path.conf=$CONF_DIR" export ES_JAVA_OPTS export JAVA_HOME @@ -116,15 +116,6 @@ case "$1" in exit 0 fi - # Prepare environment - # Check $DATA_DIR for a comma - if [ "${DATA_DIR#*,}" != "$DATA_DIR" ]; then - # $DATA_DIR contains a comma, so we should not mkdir it - mkdir -p "$LOG_DIR" && chown "$ES_USER":"$ES_GROUP" "$LOG_DIR" - else - mkdir -p "$LOG_DIR" "$DATA_DIR" && chown "$ES_USER":"$ES_GROUP" "$LOG_DIR" "$DATA_DIR" - fi - # Ensure that the PID_DIR exists (it is cleaned at OS startup time) if [ -n "$PID_DIR" ] && [ ! 
-e "$PID_DIR" ]; then mkdir -p "$PID_DIR" && chown "$ES_USER":"$ES_GROUP" "$PID_DIR" diff --git a/distribution/rpm/build.gradle b/distribution/rpm/build.gradle index 72ed58e52c3..185f558ff02 100644 --- a/distribution/rpm/build.gradle +++ b/distribution/rpm/build.gradle @@ -25,14 +25,8 @@ task buildRpm(type: Rpm) { packageGroup 'Application/Internet' prefix '/usr' packager 'Elasticsearch' - if (version.contains('~')) { - def tokenized = version.tokenize('~') - version tokenized[0] - release tokenized[1] - } else { - version version - release '1' - } + version = project.version.replace('-', '_') + release = '1' arch 'NOARCH' os 'LINUX' license '2009' diff --git a/distribution/rpm/src/main/packaging/init.d/elasticsearch b/distribution/rpm/src/main/packaging/init.d/elasticsearch index 7bcb5692a88..8f1d93dcbdc 100644 --- a/distribution/rpm/src/main/packaging/init.d/elasticsearch +++ b/distribution/rpm/src/main/packaging/init.d/elasticsearch @@ -114,7 +114,7 @@ start() { cd $ES_HOME echo -n $"Starting $prog: " # if not running, start it up here, usually something like "daemon $exec" - daemon --user $ES_USER --pidfile $pidfile $exec -p $pidfile -d -Ees.default.path.home=$ES_HOME -Ees.default.path.logs=$LOG_DIR -Ees.default.path.data=$DATA_DIR -Ees.default.path.conf=$CONF_DIR + daemon --user $ES_USER --pidfile $pidfile $exec -p $pidfile -d -Edefault.path.logs=$LOG_DIR -Edefault.path.data=$DATA_DIR -Edefault.path.conf=$CONF_DIR retval=$? echo [ $retval -eq 0 ] && touch $lockfile diff --git a/distribution/src/main/packaging/systemd/elasticsearch.service b/distribution/src/main/packaging/systemd/elasticsearch.service index ccbf4650a22..0c99464c4f6 100644 --- a/distribution/src/main/packaging/systemd/elasticsearch.service +++ b/distribution/src/main/packaging/systemd/elasticsearch.service @@ -21,9 +21,9 @@ ExecStartPre=/usr/share/elasticsearch/bin/elasticsearch-systemd-pre-exec ExecStart=/usr/share/elasticsearch/bin/elasticsearch \ -p ${PID_DIR}/elasticsearch.pid \ - -Ees.default.path.logs=${LOG_DIR} \ - -Ees.default.path.data=${DATA_DIR} \ - -Ees.default.path.conf=${CONF_DIR} + -Edefault.path.logs=${LOG_DIR} \ + -Edefault.path.data=${DATA_DIR} \ + -Edefault.path.conf=${CONF_DIR} StandardOutput=journal StandardError=inherit diff --git a/distribution/src/main/resources/bin/elasticsearch-plugin b/distribution/src/main/resources/bin/elasticsearch-plugin index 8a3b6676a98..06f8c5b8c27 100755 --- a/distribution/src/main/resources/bin/elasticsearch-plugin +++ b/distribution/src/main/resources/bin/elasticsearch-plugin @@ -81,10 +81,10 @@ fi HOSTNAME=`hostname | cut -d. 
-f1` export HOSTNAME
 
-declare -a properties=(-Delasticsearch -Des.path.home="$ES_HOME")
+declare -a args=("$@")
 
 if [ -e "$CONF_DIR" ]; then
-  properties=("${properties[@]}" -Des.default.path.conf="$CONF_DIR")
+  args=("${args[@]}" -Edefault.path.conf="$CONF_DIR")
 fi
 
-exec "$JAVA" $ES_JAVA_OPTS "${properties[@]}" -cp "$ES_HOME/lib/*" org.elasticsearch.plugins.PluginCli "$@"
+exec "$JAVA" $ES_JAVA_OPTS -Delasticsearch -Des.path.home="$ES_HOME" -cp "$ES_HOME/lib/*" org.elasticsearch.plugins.PluginCli "${args[@]}"
diff --git a/distribution/src/main/resources/bin/service.bat b/distribution/src/main/resources/bin/service.bat
index 3d73c37b823..81b6c8a5df5 100644
--- a/distribution/src/main/resources/bin/service.bat
+++ b/distribution/src/main/resources/bin/service.bat
@@ -163,7 +163,7 @@ set ES_JVM_OPTIONS="%ES_HOME%\config\jvm.options"
 if not "%ES_JAVA_OPTS%" == "" set ES_JAVA_OPTS=%ES_JAVA_OPTS: =;%
 @setlocal
-for /F "usebackq delims=" %%a in (`findstr /b \- "%ES_JVM_OPTIONS%"`) do set JVM_OPTIONS=!JVM_OPTIONS!%%a;
+for /F "usebackq delims=" %%a in (`findstr /b \- "%ES_JVM_OPTIONS%" ^| findstr /b /v "\-server \-client"`) do set JVM_OPTIONS=!JVM_OPTIONS!%%a;
 @endlocal & set ES_JAVA_OPTS=%JVM_OPTIONS%%ES_JAVA_OPTS%
 if "%ES_JAVA_OPTS:~-1%"==";" set ES_JAVA_OPTS=%ES_JAVA_OPTS:~0,-1%
diff --git a/distribution/src/main/resources/config/logging.yml b/distribution/src/main/resources/config/logging.yml
index 187e79cffa0..11cd181ebd0 100644
--- a/distribution/src/main/resources/config/logging.yml
+++ b/distribution/src/main/resources/config/logging.yml
@@ -1,6 +1,7 @@
-# you can override this using by setting a system property, for example -Ees.logger.level=DEBUG
-es.logger.level: INFO
-rootLogger: ${es.logger.level}, console, file
+# you can override using a command-line parameter
+# -E logger.level=(ERROR|WARN|INFO|DEBUG|TRACE)
+logger.level: INFO
+rootLogger: ${logger.level}, console, file
 logger:
   # log action execution errors for easier debugging
   action: DEBUG
diff --git a/docs/java-api/query-dsl/percolate-query.asciidoc b/docs/java-api/query-dsl/percolate-query.asciidoc
index 60e03e9f32e..186d707379e 100644
--- a/docs/java-api/query-dsl/percolate-query.asciidoc
+++ b/docs/java-api/query-dsl/percolate-query.asciidoc
@@ -1,8 +1,25 @@
 [[java-query-percolate-query]]
 ==== Percolate query
 
-See:
- * {ref}/query-dsl-percolate-query.html[Percolate Query]
+See: {ref}/query-dsl-percolate-query.html[Percolate Query]
+
+In order to use the `percolate` query from the Java API,
+the percolator module dependency should be on the classpath and
+the transport client should be loaded with the percolator plugin:
+
+[source,java]
+--------------------------------------------------
+TransportClient transportClient = TransportClient.builder()
+    .settings(Settings.builder().put("node.name", "node"))
+    .addPlugin(PercolatorPlugin.class)
+    .build();
+transportClient.addTransportAddress(
+    new InetSocketTransportAddress(new InetSocketAddress(InetAddresses.forString("127.0.0.1"), 9300))
+);
+--------------------------------------------------
+
+Before the `percolate` query can be used, a `percolator` mapping should be added and
+a document containing a percolator query should be indexed:
 
 [source,java]
 --------------------------------------------------
diff --git a/docs/java-api/query-dsl/special-queries.asciidoc b/docs/java-api/query-dsl/special-queries.asciidoc
index 64645685ca5..31db47ce636 100644
--- a/docs/java-api/query-dsl/special-queries.asciidoc
+++ b/docs/java-api/query-dsl/special-queries.asciidoc
@@ -20,6 +20,9
@@ final query to execute. This query allows a script to act as a filter. Also see the <>. +<>:: + +This query finds percolator queries based on documents. include::mlt-query.asciidoc[] diff --git a/docs/plugins/analysis-icu.asciidoc b/docs/plugins/analysis-icu.asciidoc index 19b4fdb07cd..815e6285235 100644 --- a/docs/plugins/analysis-icu.asciidoc +++ b/docs/plugins/analysis-icu.asciidoc @@ -48,7 +48,7 @@ convert `nfc` to `nfd` or `nfkc` to `nfkd` respectively: Here are two examples, the default usage and a customised character filter: -[source,json] +[source,js] -------------------------------------------------- PUT icu_sample { @@ -96,7 +96,7 @@ but adds better support for some Asian languages by using a dictionary-based approach to identify words in Thai, Lao, Chinese, Japanese, and Korean, and using custom rules to break Myanmar and Khmer text into syllables. -[source,json] +[source,js] -------------------------------------------------- PUT icu_sample { @@ -137,7 +137,7 @@ As a demonstration of how the rule files can be used, save the following user fi Then create an analyzer to use this rule file as follows: -[source,json] +[source,js] -------------------------------------------------- PUT icu_sample { @@ -167,7 +167,7 @@ POST icu_sample/_analyze?analyzer=my_analyzer&text=Elasticsearch. Wow! The above `analyze` request returns the following: -[source,json] +[source,js] -------------------------------------------------- # Result { @@ -198,7 +198,7 @@ You should probably prefer the <>:: - -The delete by query plugin adds support for deleting all of the documents -(from one or more indices) which match the specified query. It is a -replacement for the problematic _delete-by-query_ functionality which has been -removed from Elasticsearch core. - [float] === Community contributed API extension plugins @@ -46,4 +34,3 @@ A number of plugins have been contributed by our community: http://mahout.apache.org/[Mahout] Collaboration filtering (by hadashiA) * https://github.com/jurgc11/es-change-feed-plugin[WebSocket Change Feed Plugin] (by ForgeRock/Chris Clifton) -include::delete-by-query.asciidoc[] diff --git a/docs/plugins/delete-by-query.asciidoc b/docs/plugins/delete-by-query.asciidoc deleted file mode 100644 index aebc0dd7433..00000000000 --- a/docs/plugins/delete-by-query.asciidoc +++ /dev/null @@ -1,270 +0,0 @@ -[[plugins-delete-by-query]] -=== Delete By Query Plugin - -The delete-by-query plugin adds support for deleting all of the documents -(from one or more indices) which match the specified query. It is a -replacement for the problematic _delete-by-query_ functionality which has been -removed from Elasticsearch core. - -Internally, it uses {ref}/search-request-scroll.html[Scroll] -and {ref}/docs-bulk.html[Bulk] APIs to delete documents in an efficient and -safe manner. It is slower than the old _delete-by-query_ functionality, but -fixes the problems with the previous implementation. - -To understand more about why we removed delete-by-query from core and about -the semantics of the new implementation, see -<>. - -[TIP] -============================================ -Queries which match large numbers of documents may run for a long time, -as every document has to be deleted individually. Don't use _delete-by-query_ -to clean out all or most documents in an index. Rather create a new index and -perhaps reindex the documents you want to keep. 
-============================================ - -[float] -==== Installation - -This plugin can be installed using the plugin manager: - -[source,sh] ----------------------------------------------------------------- -sudo bin/elasticsearch-plugin install delete-by-query ----------------------------------------------------------------- - -The plugin must be installed on every node in the cluster, and each node must -be restarted after installation. - -[float] -==== Removal - -The plugin can be removed with the following command: - -[source,sh] ----------------------------------------------------------------- -sudo bin/elasticsearch-plugin remove delete-by-query ----------------------------------------------------------------- - -The node must be stopped before removing the plugin. - -[[delete-by-query-usage]] -==== Using Delete-by-Query - -The query can either be provided using a simple query string as -a parameter: - -[source,shell] --------------------------------------------------- -DELETE /twitter/tweet/_query?q=user:kimchy --------------------------------------------------- -// CONSOLE - -or using the {ref}/query-dsl.html[Query DSL] defined within the request body: - -[source,js] --------------------------------------------------- -DELETE /twitter/tweet/_query -{ - "query": { <1> - "term": { - "user": "kimchy" - } - } -} --------------------------------------------------- -// CONSOLE - -<1> The query must be passed as a value to the `query` key, in the same way as -the {ref}/search-search.html[search api]. - -Both of the above examples end up doing the same thing, which is to delete all -tweets from the twitter index for the user `kimchy`. - -Delete-by-query supports deletion across -{ref}/search-search.html#search-multi-index-type[multiple indices and multiple types]. - -[float] -=== Query-string parameters - -The following query string parameters are supported: - -`q`:: - -Instead of using the {ref}/query-dsl.html[Query DSL] to pass a `query` in the request -body, you can use the `q` query string parameter to specify a query using -{ref}/query-dsl-query-string-query.html#query-string-syntax[`query_string` syntax]. -In this case, the following additional parameters are supported: `df`, -`analyzer`, `default_operator`, `lowercase_expanded_terms`, -`analyze_wildcard` and `lenient`. -See {ref}/search-uri-request.html[URI search request] for details. - -`size`:: - -The number of hits returned by the {ref}/search-request-scroll.html[scroll] -request. Defaults to 10. May also be specified in the request body. - -`timeout`:: - -The maximum execution time of the delete by query process. Once expired, no -more documents will be deleted. - -`routing`:: - -A comma separated list of routing values to control which shards the delete by -query request should be executed on. - -When using the `q` parameter, the following additional parameters are -supported (as explained in {ref}/search-uri-request.html[URI search request]): `df`, `analyzer`, -`default_operator`. 
- - -[float] -=== Response body - -The JSON response looks like this: - -[source,js] --------------------------------------------------- -{ - "took" : 639, - "timed_out" : false, - "_indices" : { - "_all" : { - "found" : 5901, - "deleted" : 5901, - "missing" : 0, - "failed" : 0 - }, - "twitter" : { - "found" : 5901, - "deleted" : 5901, - "missing" : 0, - "failed" : 0 - } - }, - "failures" : [ ] -} --------------------------------------------------- - -Internally, the query is used to execute an initial -{ref}/search-request-scroll.html[scroll] request. As hits are -pulled from the scroll API, they are passed to the {ref}/docs-bulk.html[Bulk -API] for deletion. - -IMPORTANT: Delete by query will only delete the version of the document that -was visible to search at the time the request was executed. Any documents -that have been reindexed or updated during execution will not be deleted. - -Since documents can be updated or deleted by external operations during the -_scroll-bulk_ process, the plugin keeps track of different counters for -each index, with the totals displayed under the `_all` index. The counters -are as follows: - -`found`:: - -The number of documents matching the query for the given index. - -`deleted`:: - -The number of documents successfully deleted for the given index. - -`missing`:: - -The number of documents that were missing when the plugin tried to delete -them. Missing documents were present when the original query was run, but have -already been deleted by another process. - -`failed`:: - -The number of documents that failed to be deleted for the given index. A -document may fail to be deleted if it has been updated to a new version by -another process, or if the shard containing the document has gone missing due -to hardware failure, for example. - -[[delete-by-query-plugin-reason]] -==== Why Delete-By-Query is a plugin - -The old delete-by-query API in Elasticsearch 1.x was fast but problematic. We -decided to remove the feature from Elasticsearch for these reasons: - -Forward compatibility:: - - The old implementation wrote a delete-by-query request, including the - query, to the transaction log. This meant that, when upgrading to a new - version, old unsupported queries which cannot be executed might exist in - the translog, thus causing data corruption. - -Consistency and correctness:: - - The old implementation executed the query and deleted all matching docs on - the primary first. It then repeated this procedure on each replica shard. - There was no guarantee that the queries on the primary and the replicas - matched the same document, so it was quite possible to end up with - different documents on each shard copy. - -Resiliency:: - - The old implementation could cause out-of-memory exceptions, merge storms, - and dramatic slow downs if used incorrectly. - -[float] -=== New delete-by-query implementation - -The new implementation, provided by this plugin, is built internally -using {ref}/search-request-scroll.html[scroll] to return -the document IDs and versions of all the documents that need to be deleted. -It then uses the {ref}/docs-bulk.html[`bulk` API] to do the actual deletion. - -This can have performance as well as visibility implications. Delete-by-query -now has the following semantics: - -non-atomic:: - - A delete-by-query may fail at any time while some documents matching the - query have already been deleted. - -try-once:: - - A delete-by-query may fail at any time and will not retry it's execution. - All retry logic is left to the user. 
- -syntactic sugar:: - - A delete-by-query is equivalent to a scroll search ordered by `_doc` and - corresponding bulk-deletes by ID. - -point-in-time:: - - A delete-by-query will only delete the documents that are visible at the - point in time the delete-by-query was started, equivalent to the - scan/scroll API. - -consistent:: - - A delete-by-query will yield consistent results across all replicas of a - shard. - -forward-compatible:: - - A delete-by-query will only send IDs to the shards as deletes such that no - queries are stored in the transaction logs that might not be supported in - the future. - -visibility:: - - The effect of a delete-by-query request will not be visible to search - until the user refreshes the index, or the index is refreshed - automatically. - -The new implementation suffers from two issues, which is why we decided to -move the functionality to a plugin instead of replacing the feautre in core: - -* It is not as fast as the previous implementation. For most use cases, this - difference should not be noticeable but users running delete-by-query on - many matching documents may be affected. - -* There is currently no way to monitor or cancel a running delete-by-query - request, except for the `timeout` parameter. - -We have plans to solve both of these issues in a later version of Elasticsearch. diff --git a/docs/plugins/lang-javascript.asciidoc b/docs/plugins/lang-javascript.asciidoc index f5887d53e9c..bfee18a0a89 100644 --- a/docs/plugins/lang-javascript.asciidoc +++ b/docs/plugins/lang-javascript.asciidoc @@ -51,7 +51,7 @@ See <> for a safer option. If you have enabled {ref}/modules-scripting-security.html#enable-dynamic-scripting[inline scripts], you can use JavaScript as follows: -[source,json] +[source,js] ---- DELETE test @@ -94,7 +94,7 @@ See <> for a safer option. If you have enabled {ref}/modules-scripting-security.html#enable-dynamic-scripting[stored scripts], you can use JavaScript as follows: -[source,json] +[source,js] ---- DELETE test @@ -155,7 +155,7 @@ doc["num"].value * factor then use the script as follows: -[source,json] +[source,js] ---- DELETE test diff --git a/docs/plugins/lang-python.asciidoc b/docs/plugins/lang-python.asciidoc index 070ed3eb6a9..af2fc8a3a39 100644 --- a/docs/plugins/lang-python.asciidoc +++ b/docs/plugins/lang-python.asciidoc @@ -50,7 +50,7 @@ See <> for a safer option. If you have enabled {ref}/modules-scripting-security.html#enable-dynamic-scripting[inline scripts], you can use Python as follows: -[source,json] +[source,js] ---- DELETE test @@ -93,7 +93,7 @@ See <> for a safer option. 
If you have enabled {ref}/modules-scripting-security.html#enable-dynamic-scripting[stored scripts], you can use Python as follows: -[source,json] +[source,js] ---- DELETE test @@ -154,7 +154,7 @@ doc["num"].value * factor then use the script as follows: -[source,json] +[source,js] ---- DELETE test diff --git a/docs/plugins/plugin-script.asciidoc b/docs/plugins/plugin-script.asciidoc index 7cb7f396608..08ad129f22f 100644 --- a/docs/plugins/plugin-script.asciidoc +++ b/docs/plugins/plugin-script.asciidoc @@ -135,7 +135,7 @@ can do this as follows: [source,sh] --------------------- -sudo bin/elasticsearch-plugin -Ees.path.conf=/path/to/custom/config/dir install +sudo bin/elasticsearch-plugin -Epath.conf=/path/to/custom/config/dir install --------------------- You can also set the `CONF_DIR` environment variable to the custom config diff --git a/docs/plugins/redirects.asciidoc b/docs/plugins/redirects.asciidoc index caf2008e521..c8cf10c6319 100644 --- a/docs/plugins/redirects.asciidoc +++ b/docs/plugins/redirects.asciidoc @@ -33,6 +33,13 @@ The `cloud-azure` plugin has been split into two separate plugins: The `cloud-gce` plugin has been renamed to <> (`discovery-gce`). +[role="exclude",id="plugins-delete-by-query"] +=== Delete-By-Query plugin removed + +The Delete-By-Query plugin has been removed in favor of a new {ref}/docs-delete-by-query.html[Delete By Query API] +implementation in core. + + diff --git a/docs/plugins/repository-azure.asciidoc b/docs/plugins/repository-azure.asciidoc index dd2ae068eb6..f338728a35d 100644 --- a/docs/plugins/repository-azure.asciidoc +++ b/docs/plugins/repository-azure.asciidoc @@ -129,7 +129,7 @@ The Azure repository supports following settings: Some examples, using scripts: -[source,json] +[source,js] ---- # The simpliest one PUT _snapshot/my_backup1 diff --git a/docs/plugins/repository-s3.asciidoc b/docs/plugins/repository-s3.asciidoc index 61ae79180dd..82485b6d9d0 100644 --- a/docs/plugins/repository-s3.asciidoc +++ b/docs/plugins/repository-s3.asciidoc @@ -137,7 +137,7 @@ use `S3SignerType`, which is Signature Version 2. The S3 repository is using S3 to store snapshots. The S3 repository can be created using the following command: -[source,json] +[source,js] ---- PUT _snapshot/my_s3_repository { diff --git a/docs/plugins/store-smb.asciidoc b/docs/plugins/store-smb.asciidoc index cf0094173d1..731894ae0a8 100644 --- a/docs/plugins/store-smb.asciidoc +++ b/docs/plugins/store-smb.asciidoc @@ -68,7 +68,7 @@ Note that setting will be applied for newly created indices. It can also be set on a per-index basis at index creation time: -[source,json] +[source,js] ---- PUT my_index { diff --git a/docs/reference/analysis/analyzers/configuring.asciidoc b/docs/reference/analysis/analyzers/configuring.asciidoc index c93d800afb9..2ce13702e00 100644 --- a/docs/reference/analysis/analyzers/configuring.asciidoc +++ b/docs/reference/analysis/analyzers/configuring.asciidoc @@ -64,3 +64,38 @@ POST my_index/_analyze English stop words will be removed. 
The resulting terms are: `[ old, brown, cow ]` + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "old", + "start_offset": 4, + "end_offset": 7, + "type": "", + "position": 1 + }, + { + "token": "brown", + "start_offset": 8, + "end_offset": 13, + "type": "", + "position": 2 + }, + { + "token": "cow", + "start_offset": 14, + "end_offset": 17, + "type": "", + "position": 3 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + diff --git a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc index eccd16c23be..1707a9a399b 100644 --- a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc @@ -84,6 +84,48 @@ POST my_index/_analyze -------------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "is", + "start_offset": 0, + "end_offset": 2, + "type": "", + "position": 0 + }, + { + "token": "this", + "start_offset": 3, + "end_offset": 7, + "type": "", + "position": 1 + }, + { + "token": "deja", + "start_offset": 11, + "end_offset": 15, + "type": "", + "position": 2 + }, + { + "token": "vu", + "start_offset": 16, + "end_offset": 22, + "type": "", + "position": 3 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above example produces the following terms: [source,text] @@ -119,13 +161,10 @@ PUT my_index "analyzer": { "my_custom_analyzer": { "type": "custom", - "char_filter": [ "emoticons" <1> ], - "tokenizer": "punctuation", <1> - "filter": [ "lowercase", "english_stop" <1> @@ -165,11 +204,54 @@ POST my_index/_analyze "text": "I'm a :) person, and you?" } -------------------------------------------------- +// CONSOLE <1> The `emoticon` character filter, `punctuation` tokenizer and `english_stop` token filter are custom implementations which are defined in the same index settings. 
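To make the three stages concrete, here is a toy sketch in plain Java of what this pipeline does to the sample text: character filtering first, then tokenization, then token filtering. This is only an illustration under simplifying assumptions (plain string and regex operations stand in for the real `mapping` char filter and `pattern` tokenizer, and a tiny hard-coded stop list stands in for `_english_`); it is not how Lucene implements analysis, and the class name is made up for the example.

[source,java]
----
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class CustomAnalyzerSketch {

    public static void main(String[] args) {
        String text = "I'm a :) person, and you?";

        // Stage 1 - character filter: rewrite emoticons before tokenization,
        // like the `emoticons` mapping char filter above.
        String filtered = text.replace(":)", "_happy_").replace(":(", "_sad_");

        // A tiny stand-in for the `_english_` stop word list.
        Set<String> stopwords = new HashSet<>(Arrays.asList("a", "and", "the"));

        List<String> terms = Arrays.stream(filtered.split("[ .,!?]+")) // stage 2 - `punctuation` tokenizer
            .map(String::toLowerCase)                                  // stage 3a - `lowercase` token filter
            .filter(token -> !stopwords.contains(token))               // stage 3b - `english_stop` token filter
            .collect(Collectors.toList());

        System.out.println(terms); // prints [i'm, _happy_, person, you]
    }
}
----

Running the sketch prints the same terms as the analyze request above.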
+///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "i'm", + "start_offset": 0, + "end_offset": 3, + "type": "word", + "position": 0 + }, + { + "token": "_happy_", + "start_offset": 6, + "end_offset": 8, + "type": "word", + "position": 2 + }, + { + "token": "person", + "start_offset": 9, + "end_offset": 15, + "type": "word", + "position": 3 + }, + { + "token": "you", + "start_offset": 21, + "end_offset": 24, + "type": "word", + "position": 5 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above example produces the following terms: [source,text] diff --git a/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc b/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc index b393c883441..24dc92380bb 100644 --- a/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc @@ -36,6 +36,27 @@ POST _analyze --------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "and consistent godel is said sentence this yes", + "start_offset": 0, + "end_offset": 52, + "type": "fingerprint", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above sentence would produce the following single term: [source,text] @@ -58,16 +79,11 @@ The `fingerprint` analyzer accepts the following parameters: The maximum token size to emit. Defaults to `255`. Tokens larger than this size will be discarded. -`preserve_original`:: - - If `true`, emits two tokens: one with ASCII-folding of terms that contain - extended characters (if any) and one with the original characters. - Defaults to `false`. - `stopwords`:: A pre-defined stop words list like `_english_` or an array containing a list of stop words. Defaults to `_none_`. + `stopwords_path`:: The path to a file containing stop words. @@ -80,8 +96,7 @@ about stop word configuration. 
=== Example configuration In this example, we configure the `fingerprint` analyzer to use the -pre-defined list of English stop words, and to emit a second token in -the presence of non-ASCII characters: +pre-defined list of English stop words: [source,js] ---------------------------- @@ -92,8 +107,7 @@ PUT my_index "analyzer": { "my_fingerprint_analyzer": { "type": "fingerprint", - "stopwords": "_english_", - "preserve_original": true + "stopwords": "_english_" } } } @@ -110,9 +124,30 @@ POST my_index/_analyze ---------------------------- // CONSOLE -The above example produces the following two terms: +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "consistent godel said sentence yes", + "start_offset": 0, + "end_offset": 52, + "type": "fingerprint", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following term: [source,text] --------------------------- -[ consistent godel said sentence yes, consistent gödel said sentence yes ] +[ consistent godel said sentence yes ] --------------------------- diff --git a/docs/reference/analysis/analyzers/keyword-analyzer.asciidoc b/docs/reference/analysis/analyzers/keyword-analyzer.asciidoc index a0c1b1b0a6a..cc94f3b757e 100644 --- a/docs/reference/analysis/analyzers/keyword-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/keyword-analyzer.asciidoc @@ -25,6 +25,27 @@ POST _analyze --------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone.", + "start_offset": 0, + "end_offset": 56, + "type": "word", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above sentence would produce the following single term: [source,text] diff --git a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc index 6a4ca274416..2d5741c2b9e 100644 --- a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc @@ -30,6 +30,104 @@ POST _analyze --------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "the", + "start_offset": 0, + "end_offset": 3, + "type": "word", + "position": 0 + }, + { + "token": "2", + "start_offset": 4, + "end_offset": 5, + "type": "word", + "position": 1 + }, + { + "token": "quick", + "start_offset": 6, + "end_offset": 11, + "type": "word", + "position": 2 + }, + { + "token": "brown", + "start_offset": 12, + "end_offset": 17, + "type": "word", + "position": 3 + }, + { + "token": "foxes", + "start_offset": 18, + "end_offset": 23, + "type": "word", + "position": 4 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "word", + "position": 5 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "word", + "position": 6 + }, + { + "token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "word", + "position": 7 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "word", + "position": 8 + }, + { + "token": "dog", + "start_offset": 45, + "end_offset": 48, + "type": "word", + "position": 9 + }, + { + "token": "s", + "start_offset": 49, + "end_offset": 50, + "type": "word", + "position": 10 + }, + { + "token": "bone", + 
"start_offset": 51, + "end_offset": 55, + "type": "word", + "position": 11 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above sentence would produce the following terms: [source,text] @@ -110,6 +208,55 @@ POST my_index/_analyze <1> The backslashes in the pattern need to be escaped when specifying the pattern as a JSON string. +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "john", + "start_offset": 0, + "end_offset": 4, + "type": "word", + "position": 0 + }, + { + "token": "smith", + "start_offset": 5, + "end_offset": 10, + "type": "word", + "position": 1 + }, + { + "token": "foo", + "start_offset": 11, + "end_offset": 14, + "type": "word", + "position": 2 + }, + { + "token": "bar", + "start_offset": 15, + "end_offset": 18, + "type": "word", + "position": 3 + }, + { + "token": "com", + "start_offset": 19, + "end_offset": 22, + "type": "word", + "position": 4 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above example produces the following terms: [source,text] @@ -148,6 +295,62 @@ GET my_index/_analyze -------------------------------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "moose", + "start_offset": 0, + "end_offset": 5, + "type": "word", + "position": 0 + }, + { + "token": "x", + "start_offset": 5, + "end_offset": 6, + "type": "word", + "position": 1 + }, + { + "token": "ftp", + "start_offset": 8, + "end_offset": 11, + "type": "word", + "position": 2 + }, + { + "token": "class", + "start_offset": 11, + "end_offset": 16, + "type": "word", + "position": 3 + }, + { + "token": "2", + "start_offset": 16, + "end_offset": 17, + "type": "word", + "position": 4 + }, + { + "token": "beta", + "start_offset": 18, + "end_offset": 22, + "type": "word", + "position": 5 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above example produces the following terms: [source,text] diff --git a/docs/reference/analysis/analyzers/simple-analyzer.asciidoc b/docs/reference/analysis/analyzers/simple-analyzer.asciidoc index 4c932bb5d3e..a57c30d8dd6 100644 --- a/docs/reference/analysis/analyzers/simple-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/simple-analyzer.asciidoc @@ -25,6 +25,97 @@ POST _analyze --------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "the", + "start_offset": 0, + "end_offset": 3, + "type": "word", + "position": 0 + }, + { + "token": "quick", + "start_offset": 6, + "end_offset": 11, + "type": "word", + "position": 1 + }, + { + "token": "brown", + "start_offset": 12, + "end_offset": 17, + "type": "word", + "position": 2 + }, + { + "token": "foxes", + "start_offset": 18, + "end_offset": 23, + "type": "word", + "position": 3 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "word", + "position": 4 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "word", + "position": 5 + }, + { + "token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "word", + "position": 6 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "word", + "position": 7 + }, + { + "token": "dog", + "start_offset": 45, + "end_offset": 48, + "type": "word", + "position": 8 + }, + { + "token": "s", + "start_offset": 49, + "end_offset": 50, + "type": "word", + "position": 9 
+ }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "word", + "position": 10 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above sentence would produce the following terms: [source,text] diff --git a/docs/reference/analysis/analyzers/standard-analyzer.asciidoc b/docs/reference/analysis/analyzers/standard-analyzer.asciidoc index 72292e1d40d..3b948892483 100644 --- a/docs/reference/analysis/analyzers/standard-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/standard-analyzer.asciidoc @@ -33,6 +33,97 @@ POST _analyze --------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "the", + "start_offset": 0, + "end_offset": 3, + "type": "", + "position": 0 + }, + { + "token": "2", + "start_offset": 4, + "end_offset": 5, + "type": "", + "position": 1 + }, + { + "token": "quick", + "start_offset": 6, + "end_offset": 11, + "type": "", + "position": 2 + }, + { + "token": "brown", + "start_offset": 12, + "end_offset": 17, + "type": "", + "position": 3 + }, + { + "token": "foxes", + "start_offset": 18, + "end_offset": 23, + "type": "", + "position": 4 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "", + "position": 5 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "", + "position": 6 + }, + { + "token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "", + "position": 7 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "", + "position": 8 + }, + { + "token": "dog's", + "start_offset": 45, + "end_offset": 50, + "type": "", + "position": 9 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "", + "position": 10 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above sentence would produce the following terms: [source,text] @@ -98,6 +189,89 @@ POST my_index/_analyze ---------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "2", + "start_offset": 4, + "end_offset": 5, + "type": "", + "position": 1 + }, + { + "token": "quick", + "start_offset": 6, + "end_offset": 11, + "type": "", + "position": 2 + }, + { + "token": "brown", + "start_offset": 12, + "end_offset": 17, + "type": "", + "position": 3 + }, + { + "token": "foxes", + "start_offset": 18, + "end_offset": 23, + "type": "", + "position": 4 + }, + { + "token": "jumpe", + "start_offset": 24, + "end_offset": 29, + "type": "", + "position": 5 + }, + { + "token": "d", + "start_offset": 29, + "end_offset": 30, + "type": "", + "position": 6 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "", + "position": 7 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "", + "position": 9 + }, + { + "token": "dog's", + "start_offset": 45, + "end_offset": 50, + "type": "", + "position": 10 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "", + "position": 11 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + The above example produces the following terms: [source,text] diff --git a/docs/reference/analysis/analyzers/stop-analyzer.asciidoc b/docs/reference/analysis/analyzers/stop-analyzer.asciidoc index ada9022a287..e40436342d7 100644 --- a/docs/reference/analysis/analyzers/stop-analyzer.asciidoc +++ 
b/docs/reference/analysis/analyzers/stop-analyzer.asciidoc @@ -29,6 +29,83 @@ POST _analyze --------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "quick", + "start_offset": 6, + "end_offset": 11, + "type": "word", + "position": 1 + }, + { + "token": "brown", + "start_offset": 12, + "end_offset": 17, + "type": "word", + "position": 2 + }, + { + "token": "foxes", + "start_offset": 18, + "end_offset": 23, + "type": "word", + "position": 3 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "word", + "position": 4 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "word", + "position": 5 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "word", + "position": 7 + }, + { + "token": "dog", + "start_offset": 45, + "end_offset": 48, + "type": "word", + "position": 8 + }, + { + "token": "s", + "start_offset": 49, + "end_offset": 50, + "type": "word", + "position": 9 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "word", + "position": 10 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above sentence would produce the following terms: [source,text] @@ -87,6 +164,76 @@ POST my_index/_analyze ---------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "quick", + "start_offset": 6, + "end_offset": 11, + "type": "word", + "position": 1 + }, + { + "token": "brown", + "start_offset": 12, + "end_offset": 17, + "type": "word", + "position": 2 + }, + { + "token": "foxes", + "start_offset": 18, + "end_offset": 23, + "type": "word", + "position": 3 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "word", + "position": 4 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "word", + "position": 7 + }, + { + "token": "dog", + "start_offset": 45, + "end_offset": 48, + "type": "word", + "position": 8 + }, + { + "token": "s", + "start_offset": 49, + "end_offset": 50, + "type": "word", + "position": 9 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "word", + "position": 10 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above example produces the following terms: [source,text] diff --git a/docs/reference/analysis/analyzers/whitespace-analyzer.asciidoc b/docs/reference/analysis/analyzers/whitespace-analyzer.asciidoc index 0dce8db1c99..f95e5c6e4ab 100644 --- a/docs/reference/analysis/analyzers/whitespace-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/whitespace-analyzer.asciidoc @@ -25,6 +25,90 @@ POST _analyze --------------------------- // CONSOLE +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "The", + "start_offset": 0, + "end_offset": 3, + "type": "word", + "position": 0 + }, + { + "token": "2", + "start_offset": 4, + "end_offset": 5, + "type": "word", + "position": 1 + }, + { + "token": "QUICK", + "start_offset": 6, + "end_offset": 11, + "type": "word", + "position": 2 + }, + { + "token": "Brown-Foxes", + "start_offset": 12, + "end_offset": 23, + "type": "word", + "position": 3 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "word", + "position": 4 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "word", + "position": 5 + }, + { + 
"token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "word", + "position": 6 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "word", + "position": 7 + }, + { + "token": "dog's", + "start_offset": 45, + "end_offset": 50, + "type": "word", + "position": 8 + }, + { + "token": "bone.", + "start_offset": 51, + "end_offset": 56, + "type": "word", + "position": 9 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + The above sentence would produce the following terms: [source,text] diff --git a/docs/reference/analysis/charfilters.asciidoc b/docs/reference/analysis/charfilters.asciidoc index c9f5805284c..cd24f5bf571 100644 --- a/docs/reference/analysis/charfilters.asciidoc +++ b/docs/reference/analysis/charfilters.asciidoc @@ -1,16 +1,36 @@ [[analysis-charfilters]] == Character Filters -Character filters are used to preprocess the string of -characters before it is passed to the <>. -A character filter may be used to strip out HTML markup, or to convert -`"&"` characters to the word `"and"`. +_Character filters_ are used to preprocess the stream of characters before it +is passed to the <>. -Elasticsearch has built in characters filters which can be -used to build <>. +A character filter receives the original text as a stream of characters and +can transform the stream by adding, removing, or changing characters. For +instance, a character filter could be used to convert Arabic numerals +(٠‎١٢٣٤٥٦٧٨‎٩‎) into their Latin equivalents (0123456789), or to strip HTML +elements like `` from the stream. -include::charfilters/mapping-charfilter.asciidoc[] + +Elasticsearch has a number of built in character filters which can be used to build +<>. + +<>:: + +The `html_strip` character filter strips out HTML elements like `` and +decodes HTML entities like `&`. + +<>:: + +The `mapping` character filter replaces any occurrences of the specified +strings with the specified replacements. + +<>:: + +The `pattern_replace` character filter replaces any characters matching a +regular expression with the specified replacement. include::charfilters/htmlstrip-charfilter.asciidoc[] +include::charfilters/mapping-charfilter.asciidoc[] + include::charfilters/pattern-replace-charfilter.asciidoc[] diff --git a/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc b/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc index f12238a36ad..3d8b187d772 100644 --- a/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc +++ b/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc @@ -1,5 +1,135 @@ [[analysis-htmlstrip-charfilter]] === HTML Strip Char Filter -A char filter of type `html_strip` stripping out HTML elements from an -analyzed text. +The `html_strip` character filter strips HTML elements from the text and +replaces HTML entities with their decoded value (e.g. replacing `&` with +`&`). + +[float] +=== Example output + +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "keyword", <1> + "char_filter": [ "html_strip" ], + "text": "
<p>I&apos;m so <b>happy</b>!</p>"
+}
+---------------------------
+// CONSOLE
+<1> The <> returns a single term.
+
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "\nI'm so happy!\n",
+      "start_offset": 0,
+      "end_offset": 32,
+      "type": "word",
+      "position": 0
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+
+The above example returns the term:
+
+[source,js]
+---------------------------
+[ \nI'm so happy!\n ]
+---------------------------
+
+The same example with the `standard` tokenizer would return the following terms:
+
+[source,js]
+---------------------------
+[ I'm, so, happy ]
+---------------------------
+
+[float]
+=== Configuration
+
+The `html_strip` character filter accepts the following parameter:
+
+[horizontal]
+`escaped_tags`::
+
+    An array of HTML tags which should not be stripped from the original text.
+
+[float]
+=== Example configuration
+
+In this example, we configure the `html_strip` character filter to leave `<b>`
+tags in place:
+
+[source,js]
+----------------------------
+PUT my_index
+{
+  "settings": {
+    "analysis": {
+      "analyzer": {
+        "my_analyzer": {
+          "tokenizer": "keyword",
+          "char_filter": ["my_char_filter"]
+        }
+      },
+      "char_filter": {
+        "my_char_filter": {
+          "type": "html_strip",
+          "escaped_tags": ["b"]
+        }
+      }
+    }
+  }
+}
+
+GET _cluster/health?wait_for_status=yellow
+
+POST my_index/_analyze
+{
+  "analyzer": "my_analyzer",
+  "text": "
<p>I&apos;m so <b>happy</b>!</p>"
+}
+----------------------------
+// CONSOLE
+
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "\nI'm so <b>happy</b>!\n",
+      "start_offset": 0,
+      "end_offset": 32,
+      "type": "word",
+      "position": 0
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+
+The above example produces the following term:
+
+[source,text]
+---------------------------
+[ \nI'm so <b>happy</b>!\n ]
+---------------------------
+
+
+
diff --git a/docs/reference/analysis/charfilters/mapping-charfilter.asciidoc b/docs/reference/analysis/charfilters/mapping-charfilter.asciidoc
index 14c316dcac5..ed90e9f6ab6 100644
--- a/docs/reference/analysis/charfilters/mapping-charfilter.asciidoc
+++ b/docs/reference/analysis/charfilters/mapping-charfilter.asciidoc
@@ -1,42 +1,202 @@
 [[analysis-mapping-charfilter]]
 === Mapping Char Filter
 
-A char filter of type `mapping` replacing characters of an analyzed text
-with given mapping.
+The `mapping` character filter accepts a map of keys and values. Whenever it
+encounters a string of characters that is the same as a key, it replaces them
+with the value associated with that key.
+
+Matching is greedy; the longest pattern matching at a given point wins.
+Replacements are allowed to be the empty string.
+
+[float]
+=== Configuration
+
+The `mapping` character filter accepts the following parameters:
 
 [horizontal]
 `mappings`::
-    A list of mappings to use.
+    An array of mappings, with each element having the form `key => value`.
 
 `mappings_path`::
-    A path, relative to the `config` directory, to a mappings file
-    configuration.
+    A path, either absolute or relative to the `config` directory, to a UTF-8
+    encoded text mappings file containing a `key => value` mapping per line.
 
-Here is a sample configuration:
+Either the `mappings` or `mappings_path` parameter must be provided.
+
+[float]
+=== Example configuration
+
+In this example, we configure the `mapping` character filter to replace Arabic
+numerals with their Latin equivalents:
 
 [source,js]
--------------------------------------------------
+----------------------------
+PUT my_index
 {
-  "index" : {
-    "analysis" : {
-      "char_filter" : {
-        "my_mapping" : {
-          "type" : "mapping",
-          "mappings" : [
-            "ph => f",
-            "qu => k"
-          ]
-        }
-      },
-      "analyzer" : {
-        "custom_with_char_filter" : {
-          "tokenizer" : "standard",
-          "char_filter" : ["my_mapping"]
-        }
-      }
+  "settings": {
+    "analysis": {
+      "analyzer": {
+        "my_analyzer": {
+          "tokenizer": "keyword",
+          "char_filter": [
+            "my_char_filter"
+          ]
+        }
+      },
+      "char_filter": {
+        "my_char_filter": {
+          "type": "mapping",
+          "mappings": [
+            "٠ => 0",
+            "١ => 1",
+            "٢ => 2",
+            "٣ => 3",
+            "٤ => 4",
+            "٥ => 5",
+            "٦ => 6",
+            "٧ => 7",
+            "٨ => 8",
+            "٩ => 9"
+          ]
+        }
+      }
     }
+  }
 }
--------------------------------------------------
+
+GET _cluster/health?wait_for_status=yellow
+
+POST my_index/_analyze
+{
+  "analyzer": "my_analyzer",
+  "text": "My license plate is ٢٥٠١٥"
+}
+----------------------------
+// CONSOLE
+
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "My license plate is 25015",
+      "start_offset": 0,
+      "end_offset": 25,
+      "type": "word",
+      "position": 0
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+
+The above example produces the following term:
+
+[source,text]
+---------------------------
+[ My license plate is 25015 ]
+---------------------------
+
+Keys and values can be strings with multiple characters.
The following +example replaces the `:)` and `:(` emoticons with a text equivalent: + +[source,js] +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "standard", + "char_filter": [ + "my_char_filter" + ] + } + }, + "char_filter": { + "my_char_filter": { + "type": "mapping", + "mappings": [ + ":) => _happy_", + ":( => _sad_" + ] + } + } + } + } +} + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "I'm delighted about it :(" +} +---------------------------- +// CONSOLE + + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "I'm", + "start_offset": 0, + "end_offset": 3, + "type": "", + "position": 0 + }, + { + "token": "delighted", + "start_offset": 4, + "end_offset": 13, + "type": "", + "position": 1 + }, + { + "token": "about", + "start_offset": 14, + "end_offset": 19, + "type": "", + "position": 2 + }, + { + "token": "it", + "start_offset": 20, + "end_offset": 22, + "type": "", + "position": 3 + }, + { + "token": "_sad_", + "start_offset": 23, + "end_offset": 25, + "type": "", + "position": 4 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following terms: + +[source,text] +--------------------------- +[ I'm, delighted, about, it, _sad_ ] +--------------------------- diff --git a/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc b/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc index e3b85fd7bd1..72adefa5aec 100644 --- a/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc +++ b/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc @@ -1,37 +1,249 @@ [[analysis-pattern-replace-charfilter]] === Pattern Replace Char Filter -The `pattern_replace` char filter allows the use of a regex to -manipulate the characters in a string before analysis. The regular -expression is defined using the `pattern` parameter, and the replacement -string can be provided using the `replacement` parameter (supporting -referencing the original text, as explained -http://docs.oracle.com/javase/6/docs/api/java/util/regex/Matcher.html#appendReplacement(java.lang.StringBuffer,%20java.lang.String)[here]). -For more information check the -http://lucene.apache.org/core/4_3_1/analyzers-common/org/apache/lucene/analysis/pattern/PatternReplaceCharFilter.html[lucene -documentation] +The `pattern_replace` character filter uses a regular expression to match +characters which should be replaced with the specified replacement string. +The replacement string can refer to capture groups in the regular expression. -Here is a sample configuration: +[float] +=== Configuration + +The `pattern_replace` character filter accepts the following parameters: + +[horizontal] +`pattern`:: + + A http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html[Java regular expression]. Required. + +`replacement`:: + + The replacement string, which can reference capture groups using the + `$1`..`$9` syntax, as explained + http://docs.oracle.com/javase/8/docs/api/java/util/regex/Matcher.html#appendReplacement-java.lang.StringBuffer-java.lang.String-[here]. 
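+
+Since the pattern is supplied as JSON, any backslash in it must itself be
+escaped. As a minimal sketch (the index and filter names here are purely
+illustrative), the regular expression `\D+` (one or more non-digit
+characters) is written as `\\D+` in the request body:
+
+[source,js]
+----------------------------
+PUT digits_index
+{
+  "settings": {
+    "analysis": {
+      "char_filter": {
+        "digits_only": {
+          "type": "pattern_replace",
+          "pattern": "\\D+", <1>
+          "replacement": ""
+        }
+      }
+    }
+  }
+}
+----------------------------
+<1> The pattern `\D+`, with its backslash escaped for JSON.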
+ +[float] +=== Example configuration + +In this example, we configure the `pattern_replace` character filter to +replace any embedded dashes in numbers with underscores, i.e `123-456-789` -> +`123_456_789`: [source,js] --------------------------------------------------- +---------------------------- +PUT my_index { - "index" : { - "analysis" : { - "char_filter" : { - "my_pattern":{ - "type":"pattern_replace", - "pattern":"sample(.*)", - "replacement":"replacedSample $1" - } - }, - "analyzer" : { - "custom_with_char_filter" : { - "tokenizer" : "standard", - "char_filter" : ["my_pattern"] - } - } + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "standard", + "char_filter": [ + "my_char_filter" + ] } + }, + "char_filter": { + "my_char_filter": { + "type": "pattern_replace", + "pattern": "(\\d+)-(?=\\d)", + "replacement": "$1_" + } + } } + } } --------------------------------------------------- + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "My credit card is 123-456-789" +} +---------------------------- +// CONSOLE +// TEST[skip:Test interprets $1 as a stashed variable] + +The above example produces the following term: + +[source,text] +--------------------------- +[ My, credit, card, is 123_456_789 ] +--------------------------- + + +WARNING: Using a replacement string that changes the length of the original +text will work for search purposes, but will result in incorrect highlighting, +as can be seen in the following example. + +This example inserts a space whenever it encounters a lower-case letter +followed by an upper-case letter (i.e. `fooBarBaz` -> `foo Bar Baz`), allowing +camelCase words to be queried individually: + +[source,js] +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "standard", + "char_filter": [ + "my_char_filter" + ], + "filter": [ + "lowercase" + ] + } + }, + "char_filter": { + "my_char_filter": { + "type": "pattern_replace", + "pattern": "(?<=\\p{Lower})(?=\\p{Upper})", + "replacement": " " + } + } + } + }, + "mappings": { + "my_type": { + "properties": { + "text": { + "type": "text", + "analyzer": "my_analyzer" + } + } + } + } +} + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "The fooBarBaz method" +} +---------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "the", + "start_offset": 0, + "end_offset": 3, + "type": "", + "position": 0 + }, + { + "token": "foo", + "start_offset": 4, + "end_offset": 6, + "type": "", + "position": 1 + }, + { + "token": "bar", + "start_offset": 7, + "end_offset": 9, + "type": "", + "position": 2 + }, + { + "token": "baz", + "start_offset": 10, + "end_offset": 13, + "type": "", + "position": 3 + }, + { + "token": "method", + "start_offset": 14, + "end_offset": 20, + "type": "", + "position": 4 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + +The above returns the following terms: + +[source,js] +---------------------------- +[ the, foo, bar, baz, method ] +---------------------------- + +Querying for `bar` will find the document correctly, but highlighting on the +result will produce incorrect highlights, because our character filter changed +the length of the original text: + +[source,js] +---------------------------- +PUT my_index/my_doc/1?refresh +{ + "text": "The 
fooBarBaz method" +} + +GET my_index/_search +{ + "query": { + "match": { + "text": "bar" + } + }, + "highlight": { + "fields": { + "text": {} + } + } +} +---------------------------- +// CONSOLE +// TEST[continued] + +The output from the above is: + +[source,js] +---------------------------- +{ + "timed_out": false, + "took": $body.took, + "_shards": { + "total": 5, + "successful": 5, + "failed": 0 + }, + "hits": { + "total": 1, + "max_score": 0.4375, + "hits": [ + { + "_index": "my_index", + "_type": "my_doc", + "_id": "1", + "_score": 0.4375, + "_source": { + "text": "The fooBarBaz method" + }, + "highlight": { + "text": [ + "The fooBarBaz method" <1> + ] + } + } + ] + } +} +---------------------------- +// TESTRESPONSE[s/"took".*/"took": "$body.took",/] +<1> Note the incorrect highlight. diff --git a/docs/reference/analysis/tokenizers.asciidoc b/docs/reference/analysis/tokenizers.asciidoc index 46c02f9a4fc..b30822b6a0b 100644 --- a/docs/reference/analysis/tokenizers.asciidoc +++ b/docs/reference/analysis/tokenizers.asciidoc @@ -1,34 +1,136 @@ [[analysis-tokenizers]] == Tokenizers -Tokenizers are used to break a string down into a stream of terms -or tokens. A simple tokenizer might split the string up into terms -wherever it encounters whitespace or punctuation. +A _tokenizer_ receives a stream of characters, breaks it up into individual +_tokens_ (usually individual words), and outputs a stream of _tokens_. For +instance, a <> tokenizer breaks +text into tokens whenever it sees any whitespace. It would convert the text +`"Quick brown fox!"` into the terms `[Quick, brown, fox!]`. + +The tokenizer is also responsible for recording the order or _position_ of +each term (used for phrase and word proximity queries) and the start and end +_character offsets_ of the original word which the term represents (used for +highlighting search snippets). + +Elasticsearch has a number of built in tokenizers which can be used to build +<>. + +[float] +=== Word Oriented Tokenizers + +The following tokenizers are usually used for tokenizing full text into +individual words: + +<>:: + +The `standard` tokenizer divides text into terms on word boundaries, as +defined by the Unicode Text Segmentation algorithm. It removes most +punctuation symbols. It is the best choice for most languages. + +<>:: + +The `letter` tokenizer divides text into terms whenever it encounters a +character which is not a letter. + +<>:: + +The `lowercase` tokenizer, like the `letter` tokenizer, divides text into +terms whenever it encounters a character which is not a letter, but it also +lowercases all terms. + +<>:: + +The `whitespace` tokenizer divides text into terms whenever it encounters any +whitespace character. + +<>:: + +The `uax_url_email` tokenizer is like the `standard` tokenizer except that it +recognises URLs and email addresses as single tokens. + +<>:: + +The `classic` tokenizer is a grammar based tokenizer for the English Language. + +<>:: + +The `thai` tokenizer segments Thai text into words. + +[float] +=== Partial Word Tokenizers + +These tokenizers break up text or words into small fragments, for partial word +matching: + +<>:: + +The `ngram` tokenizer can break up text into words when it encounters any of +a list of specified characters (e.g. whitespace or punctuation), then it returns +n-grams of each word: a sliding window of continuous letters, e.g. `quick` -> +`[qu, ui, ic, ck]`. 
+ +<>:: + +The `edge_ngram` tokenizer can break up text into words when it encounters any of +a list of specified characters (e.g. whitespace or punctuation), then it returns +n-grams of each word which are anchored to the start of the word, e.g. `quick` -> +`[q, qu, qui, quic, quick]`. + + +[float] +=== Structured Text Tokenizers + +The following tokenizers are usually used with structured text like +identifiers, email addresses, zip codes, and paths, rather than with full +text: + +<>:: + +The `keyword` tokenizer is a ``noop'' tokenizer that accepts whatever text it +is given and outputs the exact same text as a single term. It can be combined +with token filters like <> to +normalise the analysed terms. + +<>:: + +The `pattern` tokenizer uses a regular expression to either split text into +terms whenever it matches a word separator, or to capture matching text as +terms. + +<>:: + +The `path_hierarchy` tokenizer takes a hierarchical value like a filesystem +path, splits on the path separator, and emits a term for each component in the +tree, e.g. `/foo/bar/baz` -> `[/foo, /foo/bar, /foo/bar/baz ]`. + + + -Elasticsearch has a number of built in tokenizers which can be -used to build <>. include::tokenizers/standard-tokenizer.asciidoc[] -include::tokenizers/edgengram-tokenizer.asciidoc[] - -include::tokenizers/keyword-tokenizer.asciidoc[] - include::tokenizers/letter-tokenizer.asciidoc[] include::tokenizers/lowercase-tokenizer.asciidoc[] -include::tokenizers/ngram-tokenizer.asciidoc[] - include::tokenizers/whitespace-tokenizer.asciidoc[] -include::tokenizers/pattern-tokenizer.asciidoc[] - include::tokenizers/uaxurlemail-tokenizer.asciidoc[] -include::tokenizers/pathhierarchy-tokenizer.asciidoc[] - include::tokenizers/classic-tokenizer.asciidoc[] include::tokenizers/thai-tokenizer.asciidoc[] + +include::tokenizers/ngram-tokenizer.asciidoc[] + +include::tokenizers/edgengram-tokenizer.asciidoc[] + + +include::tokenizers/keyword-tokenizer.asciidoc[] + +include::tokenizers/pattern-tokenizer.asciidoc[] + +include::tokenizers/pathhierarchy-tokenizer.asciidoc[] + + diff --git a/docs/reference/analysis/tokenizers/classic-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/classic-tokenizer.asciidoc index 9b6315cec96..45d4ad41526 100644 --- a/docs/reference/analysis/tokenizers/classic-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/classic-tokenizer.asciidoc @@ -1,19 +1,269 @@ [[analysis-classic-tokenizer]] === Classic Tokenizer -A tokenizer of type `classic` providing grammar based tokenizer that is -a good tokenizer for English language documents. This tokenizer has -heuristics for special treatment of acronyms, company names, email addresses, -and internet host names. However, these rules don't always work, and -the tokenizer doesn't work well for most languages other than English. +The `classic` tokenizer is a grammar based tokenizer that is good for English +language documents. This tokenizer has heuristics for special treatment of +acronyms, company names, email addresses, and internet host names. However, +these rules don't always work, and the tokenizer doesn't work well for most +languages other than English: + +* It splits words at most punctuation characters, removing punctuation. However, a + dot that's not followed by whitespace is considered part of a token. + +* It splits words at hyphens, unless there's a number in the token, in which case + the whole token is interpreted as a product number and is not split. 
+ +* It recognizes email addresses and internet hostnames as one token. + +[float] +=== Example output + +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "classic", + "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone." +} +--------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "The", + "start_offset": 0, + "end_offset": 3, + "type": "", + "position": 0 + }, + { + "token": "2", + "start_offset": 4, + "end_offset": 5, + "type": "", + "position": 1 + }, + { + "token": "QUICK", + "start_offset": 6, + "end_offset": 11, + "type": "", + "position": 2 + }, + { + "token": "Brown", + "start_offset": 12, + "end_offset": 17, + "type": "", + "position": 3 + }, + { + "token": "Foxes", + "start_offset": 18, + "end_offset": 23, + "type": "", + "position": 4 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "", + "position": 5 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "", + "position": 6 + }, + { + "token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "", + "position": 7 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "", + "position": 8 + }, + { + "token": "dog's", + "start_offset": 45, + "end_offset": 50, + "type": "", + "position": 9 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "", + "position": 10 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above sentence would produce the following terms: + +[source,text] +--------------------------- +[ The, 2, QUICK, Brown, Foxes, jumped, over, the, lazy, dog's, bone ] +--------------------------- + +[float] +=== Configuration + +The `classic` tokenizer accepts the following parameters: + +[horizontal] +`max_token_length`:: + + The maximum token length. If a token is seen that exceeds this length then + it is split at `max_token_length` intervals. Defaults to `255`. + +[float] +=== Example configuration + +In this example, we configure the `classic` tokenizer to have a +`max_token_length` of 5 (for demonstration purposes): + +[source,js] +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "my_tokenizer" + } + }, + "tokenizer": { + "my_tokenizer": { + "type": "classic", + "max_token_length": 5 + } + } + } + } +} + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone." 
+} +---------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "The", + "start_offset": 0, + "end_offset": 3, + "type": "", + "position": 0 + }, + { + "token": "2", + "start_offset": 4, + "end_offset": 5, + "type": "", + "position": 1 + }, + { + "token": "QUICK", + "start_offset": 6, + "end_offset": 11, + "type": "", + "position": 2 + }, + { + "token": "Brown", + "start_offset": 12, + "end_offset": 17, + "type": "", + "position": 3 + }, + { + "token": "Foxes", + "start_offset": 18, + "end_offset": 23, + "type": "", + "position": 4 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "", + "position": 6 + }, + { + "token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "", + "position": 7 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "", + "position": 8 + }, + { + "token": "dog's", + "start_offset": 45, + "end_offset": 50, + "type": "", + "position": 9 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "", + "position": 10 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following terms: + +[source,text] +--------------------------- +[ The, 2, QUICK, Brown, Foxes, jumpe, d, over, the, lazy, dog's, bone ] +--------------------------- -The following are settings that can be set for a `classic` tokenizer -type: -[cols="<,<",options="header",] -|======================================================================= -|Setting |Description -|`max_token_length` |The maximum token length. If a token is seen that -exceeds this length then it is discarded. Defaults to `255`. -|======================================================================= diff --git a/docs/reference/analysis/tokenizers/edgengram-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/edgengram-tokenizer.asciidoc index 41cc2337940..2328354998e 100644 --- a/docs/reference/analysis/tokenizers/edgengram-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/edgengram-tokenizer.asciidoc @@ -1,80 +1,323 @@ [[analysis-edgengram-tokenizer]] === Edge NGram Tokenizer -A tokenizer of type `edgeNGram`. +The `edge_ngram` tokenizer first breaks text down into words whenever it +encounters one of a list of specified characters, then it emits +https://en.wikipedia.org/wiki/N-gram[N-grams] of each word where the start of +the N-gram is anchored to the beginning of the word. -This tokenizer is very similar to `nGram` but only keeps n-grams which -start at the beginning of a token. +Edge N-Grams are useful for _search-as-you-type_ queries. -The following are settings that can be set for a `edgeNGram` tokenizer -type: +TIP: When you need _search-as-you-type_ for text which has a widely known +order, such as movie or song titles, the +<> is a much more efficient +choice than edge N-grams. Edge N-grams have the advantage when trying to +autocomplete words that can appear in any order. -[cols="<,<,<",options="header",] -|======================================================================= -|Setting |Description |Default value -|`min_gram` |Minimum size in codepoints of a single n-gram |`1`. +[float] +=== Example output -|`max_gram` |Maximum size in codepoints of a single n-gram |`2`. 
+With the default settings, the `edge_ngram` tokenizer treats the initial text as a +single token and produces N-grams with minimum length `1` and maximum length +`2`: -|`token_chars` | Characters classes to keep in the -tokens, Elasticsearch will split on characters that don't belong to any -of these classes. |`[]` (Keep all characters) -|======================================================================= +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "edge_ngram", + "text": "Quick Fox" +} +--------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "Q", + "start_offset": 0, + "end_offset": 1, + "type": "word", + "position": 0 + }, + { + "token": "Qu", + "start_offset": 0, + "end_offset": 2, + "type": "word", + "position": 1 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// -`token_chars` accepts the following character classes: +The above sentence would produce the following terms: + +[source,text] +--------------------------- +[ Q, Qu ] +--------------------------- + +NOTE: These default gram lengths are almost entirely useless. You need to +configure the `edge_ngram` before using it. + +[float] +=== Configuration + +The `edge_ngram` tokenizer accepts the following parameters: [horizontal] -`letter`:: for example `a`, `b`, `ï` or `京` -`digit`:: for example `3` or `7` -`whitespace`:: for example `" "` or `"\n"` -`punctuation`:: for example `!` or `"` -`symbol`:: for example `$` or `√` +`min_gram`:: + Minimum length of characters in a gram. Defaults to `1`. + +`max_gram`:: + Maximum length of characters in a gram. Defaults to `2`. + +`token_chars`:: + + Character classes that should be included in a token. Elasticsearch + will split on characters that don't belong to the classes specified. + Defaults to `[]` (keep all characters). ++ +Character classes may be any of the following: ++ +* `letter` -- for example `a`, `b`, `ï` or `京` +* `digit` -- for example `3` or `7` +* `whitespace` -- for example `" "` or `"\n"` +* `punctuation` -- for example `!` or `"` +* `symbol` -- for example `$` or `√` [float] -==== Example +=== Example configuration + +In this example, we configure the `edge_ngram` tokenizer to treat letters and +digits as tokens, and to produce grams with minimum length `2` and maximum +length `10`: [source,js] --------------------------------------------------- - curl -XPUT 'localhost:9200/test' -d ' - { - "settings" : { - "analysis" : { - "analyzer" : { - "my_edge_ngram_analyzer" : { - "tokenizer" : "my_edge_ngram_tokenizer" - } - }, - "tokenizer" : { - "my_edge_ngram_tokenizer" : { - "type" : "edgeNGram", - "min_gram" : "2", - "max_gram" : "5", - "token_chars": [ "letter", "digit" ] - } - } - } +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "my_tokenizer" } - }' + }, + "tokenizer": { + "my_tokenizer": { + "type": "edge_ngram", + "min_gram": 2, + "max_gram": 10, + "token_chars": [ + "letter", + "digit" + ] + } + } + } + } +} - curl 'localhost:9200/test/_analyze?pretty=1&analyzer=my_edge_ngram_analyzer' -d 'FC Schalke 04' - # FC, Sc, Sch, Scha, Schal, 04 --------------------------------------------------- +GET _cluster/health?wait_for_status=yellow -[float] -==== `side` deprecated +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "2 Quick Foxes." 
+} +---------------------------- +// CONSOLE -There used to be a `side` parameter up to `0.90.1` but it is now deprecated. In -order to emulate the behavior of `"side" : "BACK"` a -<> should be used together -with the <>. The -`edgeNGram` filter must be enclosed in `reverse` filters like this: +///////////////////// [source,js] --------------------------------------------------- - "filter" : ["reverse", "edgeNGram", "reverse"] --------------------------------------------------- +---------------------------- +{ + "tokens": [ + { + "token": "Qu", + "start_offset": 2, + "end_offset": 4, + "type": "word", + "position": 0 + }, + { + "token": "Qui", + "start_offset": 2, + "end_offset": 5, + "type": "word", + "position": 1 + }, + { + "token": "Quic", + "start_offset": 2, + "end_offset": 6, + "type": "word", + "position": 2 + }, + { + "token": "Quick", + "start_offset": 2, + "end_offset": 7, + "type": "word", + "position": 3 + }, + { + "token": "Fo", + "start_offset": 8, + "end_offset": 10, + "type": "word", + "position": 4 + }, + { + "token": "Fox", + "start_offset": 8, + "end_offset": 11, + "type": "word", + "position": 5 + }, + { + "token": "Foxe", + "start_offset": 8, + "end_offset": 12, + "type": "word", + "position": 6 + }, + { + "token": "Foxes", + "start_offset": 8, + "end_offset": 13, + "type": "word", + "position": 7 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + +The above example produces the following terms: + +[source,text] +--------------------------- +[ Qu, Qui, Quic, Quick, Fo, Fox, Foxe, Foxes ] +--------------------------- + +Usually we recommend using the same `analyzer` at index time and at search +time. In the case of the `edge_ngram` tokenizer, the advice is different. It +only makes sense to use the `edge_ngram` tokenizer at index time, to ensure +that partial words are available for matching in the index. At search time, +just search for the terms the user has typed in, for instance: `Quick Fo`. + +Below is an example of how to set up a field for _search-as-you-type_: + +[source,js] +----------------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "autocomplete": { + "tokenizer": "autocomplete", + "filter": [ + "lowercase" + ] + }, + "autocomplete_search": { + "tokenizer": "lowercase" + } + }, + "tokenizer": { + "autocomplete": { + "type": "edge_ngram", + "min_gram": 2, + "max_gram": 10, + "token_chars": [ + "letter" + ] + } + } + } + }, + "mappings": { + "doc": { + "properties": { + "title": { + "type": "text", + "analyzer": "autocomplete", + "search_analyzer": "autocomplete_search" + } + } + } + } +} + +PUT my_index/doc/1 +{ + "title": "Quick Foxes" <1> +} + +POST my_index/_refresh + +GET my_index/_search +{ + "query": { + "match": { + "title": { + "query": "Quick Fo", <2> + "operator": "and" + } + } + } +} +----------------------------------- +// CONSOLE + +<1> The `autocomplete` analyzer indexes the terms `[qu, qui, quic, quick, fo, fox, foxe, foxes]`. +<2> The `autocomplete_search` analyzer searches for the terms `[quick, fo]`, both of which appear in the index. 
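+
+To see why the index-time analyzer should not also be used at search time,
+consider what would happen to the query text itself: the `autocomplete`
+analyzer would break `Quick Fo` into its own edge N-grams rather than the two
+prefixes the user actually typed:
+
+[source,text]
+-----------------------------------
+[ qu, qui, quic, quick, fo ]
+-----------------------------------
+
+With the default `or` operator, a document sharing any single one of these
+grams, even just `qu`, would match the query.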
+ +///////////////////// + +[source,js] +---------------------------- +{ + "took": $body.took, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 5, + "failed": 0 + }, + "hits": { + "total": 1, + "max_score": 0.44194174, + "hits": [ + { + "_index": "my_index", + "_type": "doc", + "_id": "1", + "_score": 0.44194174, + "_source": { + "title": "Quick Foxes" + } + } + ] + } +} +---------------------------- +// TESTRESPONSE[s/"took".*/"took": "$body.took",/] +///////////////////// -which essentially reverses the token, builds front `EdgeNGrams` and reverses -the ngram again. This has the same effect as the previous `"side" : "BACK"` setting. diff --git a/docs/reference/analysis/tokenizers/keyword-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/keyword-tokenizer.asciidoc index ad1652466be..27515516fe5 100644 --- a/docs/reference/analysis/tokenizers/keyword-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/keyword-tokenizer.asciidoc @@ -1,15 +1,60 @@ [[analysis-keyword-tokenizer]] === Keyword Tokenizer -A tokenizer of type `keyword` that emits the entire input as a single -output. +The `keyword` tokenizer is a ``noop'' tokenizer that accepts whatever text it +is given and outputs the exact same text as a single term. It can be combined +with token filters to normalise output, e.g. lower-casing email addresses. -The following are settings that can be set for a `keyword` tokenizer -type: +[float] +=== Example output -[cols="<,<",options="header",] -|======================================================= -|Setting |Description -|`buffer_size` |The term buffer size. Defaults to `256`. -|======================================================= +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "keyword", + "text": "New York" +} +--------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "New York", + "start_offset": 0, + "end_offset": 8, + "type": "word", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above sentence would produce the following term: + +[source,text] +--------------------------- +[ New York ] +--------------------------- + +[float] +=== Configuration + +The `keyword` tokenizer accepts the following parameters: + +[horizontal] +`buffer_size`:: + + The number of characters read into the term buffer in a single pass. + Defaults to `256`. The term buffer will grow by this size until all the + text has been consumed. It is advisable not to change this setting. diff --git a/docs/reference/analysis/tokenizers/letter-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/letter-tokenizer.asciidoc index 03025ccd303..7423a68732d 100644 --- a/docs/reference/analysis/tokenizers/letter-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/letter-tokenizer.asciidoc @@ -1,7 +1,123 @@ [[analysis-letter-tokenizer]] === Letter Tokenizer -A tokenizer of type `letter` that divides text at non-letters. That's to -say, it defines tokens as maximal strings of adjacent letters. Note, -this does a decent job for most European languages, but does a terrible -job for some Asian languages, where words are not separated by spaces. +The `letter` tokenizer breaks text into terms whenever it encounters a +character which is not a letter. It does a reasonable job for most European +languages, but does a terrible job for some Asian languages, where words are +not separated by spaces. 
+
+[float]
+=== Example output
+
+[source,js]
+---------------------------
+POST _analyze
+{
+  "tokenizer": "letter",
+  "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone."
+}
+---------------------------
+// CONSOLE
+
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "The",
+      "start_offset": 0,
+      "end_offset": 3,
+      "type": "word",
+      "position": 0
+    },
+    {
+      "token": "QUICK",
+      "start_offset": 6,
+      "end_offset": 11,
+      "type": "word",
+      "position": 1
+    },
+    {
+      "token": "Brown",
+      "start_offset": 12,
+      "end_offset": 17,
+      "type": "word",
+      "position": 2
+    },
+    {
+      "token": "Foxes",
+      "start_offset": 18,
+      "end_offset": 23,
+      "type": "word",
+      "position": 3
+    },
+    {
+      "token": "jumped",
+      "start_offset": 24,
+      "end_offset": 30,
+      "type": "word",
+      "position": 4
+    },
+    {
+      "token": "over",
+      "start_offset": 31,
+      "end_offset": 35,
+      "type": "word",
+      "position": 5
+    },
+    {
+      "token": "the",
+      "start_offset": 36,
+      "end_offset": 39,
+      "type": "word",
+      "position": 6
+    },
+    {
+      "token": "lazy",
+      "start_offset": 40,
+      "end_offset": 44,
+      "type": "word",
+      "position": 7
+    },
+    {
+      "token": "dog",
+      "start_offset": 45,
+      "end_offset": 48,
+      "type": "word",
+      "position": 8
+    },
+    {
+      "token": "s",
+      "start_offset": 49,
+      "end_offset": 50,
+      "type": "word",
+      "position": 9
+    },
+    {
+      "token": "bone",
+      "start_offset": 51,
+      "end_offset": 55,
+      "type": "word",
+      "position": 10
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+
+The above sentence would produce the following terms:
+
+[source,text]
+---------------------------
+[ The, QUICK, Brown, Foxes, jumped, over, the, lazy, dog, s, bone ]
+---------------------------
+
+[float]
+=== Configuration
+
+The `letter` tokenizer is not configurable.
diff --git a/docs/reference/analysis/tokenizers/lowercase-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/lowercase-tokenizer.asciidoc
index 0cdbbc387a4..5aad28b4394 100644
--- a/docs/reference/analysis/tokenizers/lowercase-tokenizer.asciidoc
+++ b/docs/reference/analysis/tokenizers/lowercase-tokenizer.asciidoc
@@ -1,15 +1,128 @@
 [[analysis-lowercase-tokenizer]]
 === Lowercase Tokenizer
 
-A tokenizer of type `lowercase` that performs the function of
-<> and
-<> together. It divides text at non-letters and converts
-them to lower case. While it is functionally equivalent to the
-combination of
-<> and
-<>, there is a performance advantage to doing the two
-tasks at once, hence this (redundant) implementation.
+
+The `lowercase` tokenizer, like the
+<<analysis-letter-tokenizer,`letter` tokenizer>>, breaks text into terms
+whenever it encounters a character which is not a letter, but it also
+lowercases all terms. It is functionally equivalent to the
+<<analysis-letter-tokenizer,`letter` tokenizer>> combined with the
+<<analysis-lowercase-tokenfilter,`lowercase` token filter>>, but is more
+efficient as it performs both steps in a single pass.
+
+
+[float]
+=== Example output
+
+[source,js]
+---------------------------
+POST _analyze
+{
+  "tokenizer": "lowercase",
+  "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone."
+} +--------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "the", + "start_offset": 0, + "end_offset": 3, + "type": "word", + "position": 0 + }, + { + "token": "quick", + "start_offset": 6, + "end_offset": 11, + "type": "word", + "position": 1 + }, + { + "token": "brown", + "start_offset": 12, + "end_offset": 17, + "type": "word", + "position": 2 + }, + { + "token": "foxes", + "start_offset": 18, + "end_offset": 23, + "type": "word", + "position": 3 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "word", + "position": 4 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "word", + "position": 5 + }, + { + "token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "word", + "position": 6 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "word", + "position": 7 + }, + { + "token": "dog", + "start_offset": 45, + "end_offset": 48, + "type": "word", + "position": 8 + }, + { + "token": "s", + "start_offset": 49, + "end_offset": 50, + "type": "word", + "position": 9 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "word", + "position": 10 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above sentence would produce the following terms: + +[source,text] +--------------------------- +[ the, quick, brown, foxes, jumped, over, the, lazy, dog, s, bone ] +--------------------------- + +[float] +=== Configuration + +The `lowercase` tokenizer is not configurable. diff --git a/docs/reference/analysis/tokenizers/ngram-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/ngram-tokenizer.asciidoc index 23e6bc52dda..cf45da0627e 100644 --- a/docs/reference/analysis/tokenizers/ngram-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/ngram-tokenizer.asciidoc @@ -1,57 +1,306 @@ [[analysis-ngram-tokenizer]] === NGram Tokenizer -A tokenizer of type `nGram`. +The `ngram` tokenizer first breaks text down into words whenever it encounters +one of a list of specified characters, then it emits +https://en.wikipedia.org/wiki/N-gram[N-grams] of each word of the specified +length. -The following are settings that can be set for a `nGram` tokenizer type: - -[cols="<,<,<",options="header",] -|======================================================================= -|Setting |Description |Default value -|`min_gram` |Minimum size in codepoints of a single n-gram |`1`. - -|`max_gram` |Maximum size in codepoints of a single n-gram |`2`. - -|`token_chars` |Characters classes to keep in the -tokens, Elasticsearch will split on characters that don't belong to any -of these classes. |`[]` (Keep all characters) -|======================================================================= - -`token_chars` accepts the following character classes: - -[horizontal] -`letter`:: for example `a`, `b`, `ï` or `京` -`digit`:: for example `3` or `7` -`whitespace`:: for example `" "` or `"\n"` -`punctuation`:: for example `!` or `"` -`symbol`:: for example `$` or `√` +N-grams are like a sliding window that moves across the word - a continuous +sequence of characters of the specified length. They are useful for querying +languages that don't use spaces or that have long compound words, like German. 
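+
+For example, sliding a three-character window across the word `quick` yields
+the tri-grams shown below; in general, a word of `L` characters produces
+`L - N + 1` grams of length `N`, so small gram lengths can add a large number
+of terms to the index:
+
+[source,text]
+---------------------------
+[ qui, uic, ick ]
+---------------------------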
[float] -==== Example +=== Example output + +With the default settings, the `ngram` tokenizer treats the initial text as a +single token and produces N-grams with minimum length `1` and maximum length +`2`: [source,js] --------------------------------------------------- - curl -XPUT 'localhost:9200/test' -d ' - { - "settings" : { - "analysis" : { - "analyzer" : { - "my_ngram_analyzer" : { - "tokenizer" : "my_ngram_tokenizer" - } - }, - "tokenizer" : { - "my_ngram_tokenizer" : { - "type" : "nGram", - "min_gram" : "2", - "max_gram" : "3", - "token_chars": [ "letter", "digit" ] - } - } - } - } - }' +--------------------------- +POST _analyze +{ + "tokenizer": "ngram", + "text": "Quick Fox" +} +--------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "Q", + "start_offset": 0, + "end_offset": 1, + "type": "word", + "position": 0 + }, + { + "token": "Qu", + "start_offset": 0, + "end_offset": 2, + "type": "word", + "position": 1 + }, + { + "token": "u", + "start_offset": 1, + "end_offset": 2, + "type": "word", + "position": 2 + }, + { + "token": "ui", + "start_offset": 1, + "end_offset": 3, + "type": "word", + "position": 3 + }, + { + "token": "i", + "start_offset": 2, + "end_offset": 3, + "type": "word", + "position": 4 + }, + { + "token": "ic", + "start_offset": 2, + "end_offset": 4, + "type": "word", + "position": 5 + }, + { + "token": "c", + "start_offset": 3, + "end_offset": 4, + "type": "word", + "position": 6 + }, + { + "token": "ck", + "start_offset": 3, + "end_offset": 5, + "type": "word", + "position": 7 + }, + { + "token": "k", + "start_offset": 4, + "end_offset": 5, + "type": "word", + "position": 8 + }, + { + "token": "k ", + "start_offset": 4, + "end_offset": 6, + "type": "word", + "position": 9 + }, + { + "token": " ", + "start_offset": 5, + "end_offset": 6, + "type": "word", + "position": 10 + }, + { + "token": " F", + "start_offset": 5, + "end_offset": 7, + "type": "word", + "position": 11 + }, + { + "token": "F", + "start_offset": 6, + "end_offset": 7, + "type": "word", + "position": 12 + }, + { + "token": "Fo", + "start_offset": 6, + "end_offset": 8, + "type": "word", + "position": 13 + }, + { + "token": "o", + "start_offset": 7, + "end_offset": 8, + "type": "word", + "position": 14 + }, + { + "token": "ox", + "start_offset": 7, + "end_offset": 9, + "type": "word", + "position": 15 + }, + { + "token": "x", + "start_offset": 8, + "end_offset": 9, + "type": "word", + "position": 16 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above sentence would produce the following terms: + +[source,text] +--------------------------- +[ Q, Qu, u, ui, i, ic, c, ck, k, "k ", " ", " F", F, Fo, o, ox, x ] +--------------------------- + +[float] +=== Configuration + +The `ngram` tokenizer accepts the following parameters: + +[horizontal] +`min_gram`:: + Minimum length of characters in a gram. Defaults to `1`. + +`max_gram`:: + Maximum length of characters in a gram. Defaults to `2`. + +`token_chars`:: + + Character classes that should be included in a token. Elasticsearch + will split on characters that don't belong to the classes specified. + Defaults to `[]` (keep all characters). 
++ +Character classes may be any of the following: ++ +* `letter` -- for example `a`, `b`, `ï` or `京` +* `digit` -- for example `3` or `7` +* `whitespace` -- for example `" "` or `"\n"` +* `punctuation` -- for example `!` or `"` +* `symbol` -- for example `$` or `√` + +TIP: It usually makes sense to set `min_gram` and `max_gram` to the same +value. The smaller the length, the more documents will match but the lower +the quality of the matches. The longer the length, the more specific the +matches. A tri-gram (length `3`) is a good place to start. + +[float] +=== Example configuration + +In this example, we configure the `ngram` tokenizer to treat letters and +digits as tokens, and to produce tri-grams (grams of length `3`): + +[source,js] +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "my_tokenizer" + } + }, + "tokenizer": { + "my_tokenizer": { + "type": "ngram", + "min_gram": 3, + "max_gram": 3, + "token_chars": [ + "letter", + "digit" + ] + } + } + } + } +} + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "2 Quick Foxes." +} +---------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "Qui", + "start_offset": 2, + "end_offset": 5, + "type": "word", + "position": 0 + }, + { + "token": "uic", + "start_offset": 3, + "end_offset": 6, + "type": "word", + "position": 1 + }, + { + "token": "ick", + "start_offset": 4, + "end_offset": 7, + "type": "word", + "position": 2 + }, + { + "token": "Fox", + "start_offset": 8, + "end_offset": 11, + "type": "word", + "position": 3 + }, + { + "token": "oxe", + "start_offset": 9, + "end_offset": 12, + "type": "word", + "position": 4 + }, + { + "token": "xes", + "start_offset": 10, + "end_offset": 13, + "type": "word", + "position": 5 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following terms: + +[source,text] +--------------------------- +[ Qui, uic, ick, Fox, oxe, xes ] +--------------------------- + - curl 'localhost:9200/test/_analyze?pretty=1&analyzer=my_ngram_analyzer' -d 'FC Schalke 04' - # FC, Sc, Sch, ch, cha, ha, hal, al, alk, lk, lke, ke, 04 --------------------------------------------------- diff --git a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc index e6876f55bc6..b656e67eaec 100644 --- a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc @@ -1,32 +1,175 @@ [[analysis-pathhierarchy-tokenizer]] === Path Hierarchy Tokenizer -The `path_hierarchy` tokenizer takes something like this: +The `path_hierarchy` tokenizer takes a hierarchical value like a filesystem +path, splits on the path separator, and emits a term for each component in the +tree. 
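+
+Because each ancestor path is indexed as a term of its own, a document can be
+found by any directory in its path. The following minimal sketch (index, type,
+and field names are purely illustrative) pairs the tokenizer with the
+`keyword` search analyzer, so that the query path is looked up as a single
+unbroken term:
+
+[source,js]
+----------------------------
+PUT my_files
+{
+  "settings": {
+    "analysis": {
+      "analyzer": {
+        "paths": {
+          "tokenizer": "path_hierarchy"
+        }
+      }
+    }
+  },
+  "mappings": {
+    "file": {
+      "properties": {
+        "path": {
+          "type": "text",
+          "analyzer": "paths",
+          "search_analyzer": "keyword" <1>
+        }
+      }
+    }
+  }
+}
+
+PUT my_files/file/1?refresh
+{
+  "path": "/one/two/three"
+}
+
+GET my_files/_search
+{
+  "query": {
+    "match": {
+      "path": "/one/two" <2>
+    }
+  }
+}
+----------------------------
+<1> The query string is analyzed with the `keyword` analyzer, so it is looked
+    up as one unbroken term.
+<2> Matches, because `/one/two` is one of the terms indexed for
+    `/one/two/three`.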
-------------------------- -/something/something/else -------------------------- +[float] +=== Example output -And produces tokens: +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "path_hierarchy", + "text": "/one/two/three" +} +--------------------------- +// CONSOLE -------------------------- -/something -/something/something -/something/something/else -------------------------- +///////////////////// -[cols="<,<",options="header",] -|======================================================================= -|Setting |Description -|`delimiter` |The character delimiter to use, defaults to `/`. +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "/one", + "start_offset": 0, + "end_offset": 4, + "type": "word", + "position": 0 + }, + { + "token": "/one/two", + "start_offset": 0, + "end_offset": 8, + "type": "word", + "position": 0 + }, + { + "token": "/one/two/three", + "start_offset": 0, + "end_offset": 14, + "type": "word", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE -|`replacement` |An optional replacement character to use. Defaults to -the `delimiter`. +///////////////////// -|`buffer_size` |The buffer size to use, defaults to `1024`. -|`reverse` |Generates tokens in reverse order, defaults to `false`. -|`skip` |Controls initial tokens to skip, defaults to `0`. -|======================================================================= +The above text would produce the following terms: + +[source,text] +--------------------------- +[ /one, /one/two, /one/two/three ] +--------------------------- + +[float] +=== Configuration + +The `path_hierarchy` tokenizer accepts the following parameters: + +[horizontal] +`delimiter`:: + The character to use as the path separator. Defaults to `/`. + +`replacement`:: + An optional replacement character to use for the delimiter. + Defaults to the `delimiter`. + +`buffer_size`:: + The number of characters read into the term buffer in a single pass. + Defaults to `1024`. The term buffer will grow by this size until all the + text has been consumed. It is advisable not to change this setting. + +`reverse`:: + If set to `true`, emits the tokens in reverse order. Defaults to `false`. + +`skip`:: + The number of initial tokens to skip. Defaults to `0`. + +[float] +=== Example configuration + +In this example, we configure the `path_hierarchy` tokenizer to split on `-` +characters, and to replace them with `/`. 
The first two tokens are skipped: + +[source,js] +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "my_tokenizer" + } + }, + "tokenizer": { + "my_tokenizer": { + "type": "path_hierarchy", + "delimiter": "-", + "replacement": "/", + "skip": 2 + } + } + } + } +} + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "one-two-three-four-five" +} +---------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "/three", + "start_offset": 7, + "end_offset": 13, + "type": "word", + "position": 0 + }, + { + "token": "/three/four", + "start_offset": 7, + "end_offset": 18, + "type": "word", + "position": 0 + }, + { + "token": "/three/four/five", + "start_offset": 7, + "end_offset": 23, + "type": "word", + "position": 0 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following terms: + +[source,text] +--------------------------- +[ /three, /three/four, /three/four/five ] +--------------------------- + +If we were to set `reverse` to `true`, it would produce the following: + +[source,text] +--------------------------- +[ one/two/three/, two/three/, three/ ] +--------------------------- diff --git a/docs/reference/analysis/tokenizers/pattern-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/pattern-tokenizer.asciidoc index 9a148456195..ca902a4e5f2 100644 --- a/docs/reference/analysis/tokenizers/pattern-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/pattern-tokenizer.asciidoc @@ -1,38 +1,268 @@ [[analysis-pattern-tokenizer]] === Pattern Tokenizer -A tokenizer of type `pattern` that can flexibly separate text into terms -via a regular expression. Accepts the following settings: +The `pattern` tokenizer uses a regular expression to either split text into +terms whenever it matches a word separator, or to capture matching text as +terms. -[cols="<,<",options="header",] -|====================================================================== -|Setting |Description -|`pattern` |The regular expression pattern, defaults to `\W+`. -|`flags` |The regular expression flags. -|`group` |Which group to extract into tokens. Defaults to `-1` (split). -|====================================================================== +The default pattern is `\W+`, which splits text whenever it encounters +non-word characters. -*IMPORTANT*: The regular expression should match the *token separators*, -not the tokens themselves. +[float] +=== Example output -********************************************* -Note that you may need to escape `pattern` string literal according to -your client language rules. For example, in many programming languages -a string literal for `\W+` pattern is written as `"\\W+"`. -There is nothing special about `pattern` (you may have to escape other -string literals as well); escaping `pattern` is common just because it -often contains characters that should be escaped. -********************************************* +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "pattern", + "text": "The foo_bar_size's default is 5." +} +--------------------------- +// CONSOLE -`group` set to `-1` (the default) is equivalent to "split". Using group ->= 0 selects the matching group as the token. 
For example, if you have: +///////////////////// ------------------------- -pattern = '([^']+)' -group = 0 -input = aaa 'bbb' 'ccc' ------------------------- +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "The", + "start_offset": 0, + "end_offset": 3, + "type": "word", + "position": 0 + }, + { + "token": "foo_bar_size", + "start_offset": 4, + "end_offset": 16, + "type": "word", + "position": 1 + }, + { + "token": "s", + "start_offset": 17, + "end_offset": 18, + "type": "word", + "position": 2 + }, + { + "token": "default", + "start_offset": 19, + "end_offset": 26, + "type": "word", + "position": 3 + }, + { + "token": "is", + "start_offset": 27, + "end_offset": 29, + "type": "word", + "position": 4 + }, + { + "token": "5", + "start_offset": 30, + "end_offset": 31, + "type": "word", + "position": 5 + } + ] +} +---------------------------- +// TESTRESPONSE -the output will be two tokens: `'bbb'` and `'ccc'` (including the `'` -marks). With the same input but using group=1, the output would be: -`bbb` and `ccc` (no `'` marks). +///////////////////// + + +The above sentence would produce the following terms: + +[source,text] +--------------------------- +[ The, foo_bar_size, s, default, is, 5 ] +--------------------------- + +[float] +=== Configuration + +The `pattern` tokenizer accepts the following parameters: + +[horizontal] +`pattern`:: + + A http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html[Java regular expression], defaults to `\W+`. + +`flags`:: + + Java regular expression http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html#field.summary[flags]. + lags should be pipe-separated, eg `"CASE_INSENSITIVE|COMMENTS"`. + +`group`:: + + Which capture group to extract as tokens. Defaults to `-1` (split). + +[float] +=== Example configuration + +In this example, we configure the `pattern` tokenizer to break text into +tokens when it encounters commas: + +[source,js] +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "my_tokenizer" + } + }, + "tokenizer": { + "my_tokenizer": { + "type": "pattern", + "pattern": "," + } + } + } + } +} + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "comma,separated,values" +} +---------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "comma", + "start_offset": 0, + "end_offset": 5, + "type": "word", + "position": 0 + }, + { + "token": "separated", + "start_offset": 6, + "end_offset": 15, + "type": "word", + "position": 1 + }, + { + "token": "values", + "start_offset": 16, + "end_offset": 22, + "type": "word", + "position": 2 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following terms: + +[source,text] +--------------------------- +[ comma, separated, values ] +--------------------------- + +In the next example, we configure the `pattern` tokenizer to capture values +enclosed in double quotes (ignoring embedded escaped quotes `\"`). 
The regex +itself looks like this: + + "((?:\\"|[^"]|\\")*)" + +And reads as follows: + +* A literal `"` +* Start capturing: +** A literal `\"` OR any character except `"` +** Repeat until no more characters match +* A literal closing `"` + +When the pattern is specified in JSON, the `"` and `\` characters need to be +escaped, so the pattern ends up looking like: + + \"((?:\\\\\"|[^\"]|\\\\\")+)\" + +[source,js] +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "my_tokenizer" + } + }, + "tokenizer": { + "my_tokenizer": { + "type": "pattern", + "pattern": "\"((?:\\\\\"|[^\"]|\\\\\")+)\"", + "group": 1 + } + } + } + } +} + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "\"value\", \"value with embedded \\\" quote\"" +} +---------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "value", + "start_offset": 1, + "end_offset": 6, + "type": "word", + "position": 0 + }, + { + "token": "value with embedded \\\" quote", + "start_offset": 10, + "end_offset": 38, + "type": "word", + "position": 1 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + +The above example produces the following two terms: + +[source,text] +--------------------------- +[ value, value with embedded \" quote ] +--------------------------- diff --git a/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc index 42dbe5a864a..ee052529b43 100644 --- a/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc @@ -1,18 +1,274 @@ [[analysis-standard-tokenizer]] === Standard Tokenizer -A tokenizer of type `standard` providing grammar based tokenizer that is -a good tokenizer for most European language documents. The tokenizer -implements the Unicode Text Segmentation algorithm, as specified in -http://unicode.org/reports/tr29/[Unicode Standard Annex #29]. +The `standard` tokenizer provides grammar based tokenization (based on the +Unicode Text Segmentation algorithm, as specified in +http://unicode.org/reports/tr29/[Unicode Standard Annex #29]) and works well +for most languages. + +[float] +=== Example output + +[source,js] +--------------------------- +POST _analyze +{ + "tokenizer": "standard", + "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone." 
+} +--------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "The", + "start_offset": 0, + "end_offset": 3, + "type": "", + "position": 0 + }, + { + "token": "2", + "start_offset": 4, + "end_offset": 5, + "type": "", + "position": 1 + }, + { + "token": "QUICK", + "start_offset": 6, + "end_offset": 11, + "type": "", + "position": 2 + }, + { + "token": "Brown", + "start_offset": 12, + "end_offset": 17, + "type": "", + "position": 3 + }, + { + "token": "Foxes", + "start_offset": 18, + "end_offset": 23, + "type": "", + "position": 4 + }, + { + "token": "jumped", + "start_offset": 24, + "end_offset": 30, + "type": "", + "position": 5 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "", + "position": 6 + }, + { + "token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "", + "position": 7 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "", + "position": 8 + }, + { + "token": "dog's", + "start_offset": 45, + "end_offset": 50, + "type": "", + "position": 9 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "", + "position": 10 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above sentence would produce the following terms: + +[source,text] +--------------------------- +[ The, 2, QUICK, Brown, Foxes, jumped, over, the, lazy, dog's, bone ] +--------------------------- + +[float] +=== Configuration + +The `standard` tokenizer accepts the following parameters: + +[horizontal] +`max_token_length`:: + + The maximum token length. If a token is seen that exceeds this length then + it is split at `max_token_length` intervals. Defaults to `255`. + +[float] +=== Example configuration + +In this example, we configure the `standard` tokenizer to have a +`max_token_length` of 5 (for demonstration purposes): + +[source,js] +---------------------------- +PUT my_index +{ + "settings": { + "analysis": { + "analyzer": { + "my_analyzer": { + "tokenizer": "my_tokenizer" + } + }, + "tokenizer": { + "my_tokenizer": { + "type": "standard", + "max_token_length": 5 + } + } + } + } +} + +GET _cluster/health?wait_for_status=yellow + +POST my_index/_analyze +{ + "analyzer": "my_analyzer", + "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone." 
+} +---------------------------- +// CONSOLE + +///////////////////// + +[source,js] +---------------------------- +{ + "tokens": [ + { + "token": "The", + "start_offset": 0, + "end_offset": 3, + "type": "", + "position": 0 + }, + { + "token": "2", + "start_offset": 4, + "end_offset": 5, + "type": "", + "position": 1 + }, + { + "token": "QUICK", + "start_offset": 6, + "end_offset": 11, + "type": "", + "position": 2 + }, + { + "token": "Brown", + "start_offset": 12, + "end_offset": 17, + "type": "", + "position": 3 + }, + { + "token": "Foxes", + "start_offset": 18, + "end_offset": 23, + "type": "", + "position": 4 + }, + { + "token": "jumpe", + "start_offset": 24, + "end_offset": 29, + "type": "", + "position": 5 + }, + { + "token": "d", + "start_offset": 29, + "end_offset": 30, + "type": "", + "position": 6 + }, + { + "token": "over", + "start_offset": 31, + "end_offset": 35, + "type": "", + "position": 7 + }, + { + "token": "the", + "start_offset": 36, + "end_offset": 39, + "type": "", + "position": 8 + }, + { + "token": "lazy", + "start_offset": 40, + "end_offset": 44, + "type": "", + "position": 9 + }, + { + "token": "dog's", + "start_offset": 45, + "end_offset": 50, + "type": "", + "position": 10 + }, + { + "token": "bone", + "start_offset": 51, + "end_offset": 55, + "type": "", + "position": 11 + } + ] +} +---------------------------- +// TESTRESPONSE + +///////////////////// + + +The above example produces the following terms: + +[source,text] +--------------------------- +[ The, 2, QUICK, Brown, Foxes, jumpe, d, over, the, lazy, dog's, bone ] +--------------------------- -The following are settings that can be set for a `standard` tokenizer -type: -[cols="<,<",options="header",] -|======================================================================= -|Setting |Description -|`max_token_length` |The maximum token length. If a token is seen that -exceeds this length then it is split at `max_token_length` intervals. Defaults to `255`. -|======================================================================= diff --git a/docs/reference/analysis/tokenizers/thai-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/thai-tokenizer.asciidoc index 06f0b6892e7..3e9904d116e 100644 --- a/docs/reference/analysis/tokenizers/thai-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/thai-tokenizer.asciidoc @@ -1,7 +1,106 @@ [[analysis-thai-tokenizer]] === Thai Tokenizer -A tokenizer of type `thai` that segments Thai text into words. This tokenizer -uses the built-in Thai segmentation algorithm included with Java to divide -up Thai text. Text in other languages in general will be treated the same -as `standard`. +The `thai` tokenizer segments Thai text into words, using the Thai +segmentation algorithm included with Java. Text in other languages in general +will be treated the same as the +<>. + +WARNING: This tokenizer may not be supported by all JREs. It is known to work +with Sun/Oracle and OpenJDK. If your application needs to be fully portable, +consider using the {plugins}/analysis-icu-tokenizer.html[ICU Tokenizer] instead. 
+
+[float]
+=== Example output
+
+[source,js]
+---------------------------
+POST _analyze
+{
+  "tokenizer": "thai",
+  "text": "การที่ได้ต้องแสดงว่างานดี"
+}
+---------------------------
+// CONSOLE
+
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "การ",
+      "start_offset": 0,
+      "end_offset": 3,
+      "type": "word",
+      "position": 0
+    },
+    {
+      "token": "ที่",
+      "start_offset": 3,
+      "end_offset": 6,
+      "type": "word",
+      "position": 1
+    },
+    {
+      "token": "ได้",
+      "start_offset": 6,
+      "end_offset": 9,
+      "type": "word",
+      "position": 2
+    },
+    {
+      "token": "ต้อง",
+      "start_offset": 9,
+      "end_offset": 13,
+      "type": "word",
+      "position": 3
+    },
+    {
+      "token": "แสดง",
+      "start_offset": 13,
+      "end_offset": 17,
+      "type": "word",
+      "position": 4
+    },
+    {
+      "token": "ว่า",
+      "start_offset": 17,
+      "end_offset": 20,
+      "type": "word",
+      "position": 5
+    },
+    {
+      "token": "งาน",
+      "start_offset": 20,
+      "end_offset": 23,
+      "type": "word",
+      "position": 6
+    },
+    {
+      "token": "ดี",
+      "start_offset": 23,
+      "end_offset": 25,
+      "type": "word",
+      "position": 7
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+
+The above sentence would produce the following terms:
+
+[source,text]
+---------------------------
+[ การ, ที่, ได้, ต้อง, แสดง, ว่า, งาน, ดี ]
+---------------------------
+
+[float]
+=== Configuration
+
+The `thai` tokenizer is not configurable.
diff --git a/docs/reference/analysis/tokenizers/uaxurlemail-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/uaxurlemail-tokenizer.asciidoc
index 9ed28e60b91..500a5e191f1 100644
--- a/docs/reference/analysis/tokenizers/uaxurlemail-tokenizer.asciidoc
+++ b/docs/reference/analysis/tokenizers/uaxurlemail-tokenizer.asciidoc
@@ -1,16 +1,199 @@
 [[analysis-uaxurlemail-tokenizer]]
-=== UAX Email URL Tokenizer
+=== UAX URL Email Tokenizer
 
-A tokenizer of type `uax_url_email` which works exactly like the
-`standard` tokenizer, but tokenizes emails and urls as single tokens.
+The `uax_url_email` tokenizer is like the <<analysis-standard-tokenizer,`standard` tokenizer>> except that it
+recognises URLs and email addresses as single tokens.
 
-The following are settings that can be set for a `uax_url_email`
-tokenizer type:
+[float]
+=== Example output
 
-[cols="<,<",options="header",]
-|=======================================================================
-|Setting |Description
-|`max_token_length` |The maximum token length. If a token is seen that
-exceeds this length then it is discarded. Defaults to `255`.
-|=======================================================================
+[source,js]
+---------------------------
+POST _analyze
+{
+  "tokenizer": "uax_url_email",
+  "text": "Email me at john.smith@global-international.com"
+}
+---------------------------
+// CONSOLE
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "Email",
+      "start_offset": 0,
+      "end_offset": 5,
+      "type": "<ALPHANUM>",
+      "position": 0
+    },
+    {
+      "token": "me",
+      "start_offset": 6,
+      "end_offset": 8,
+      "type": "<ALPHANUM>",
+      "position": 1
+    },
+    {
+      "token": "at",
+      "start_offset": 9,
+      "end_offset": 11,
+      "type": "<ALPHANUM>",
+      "position": 2
+    },
+    {
+      "token": "john.smith@global-international.com",
+      "start_offset": 12,
+      "end_offset": 47,
+      "type": "<EMAIL>",
+      "position": 3
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+
+The above sentence would produce the following terms:
+
+[source,text]
+---------------------------
+[ Email, me, at, john.smith@global-international.com ]
+---------------------------
+
+while the `standard` tokenizer would produce:
+
+[source,text]
+---------------------------
+[ Email, me, at, john.smith, global, international.com ]
+---------------------------
+
+[float]
+=== Configuration
+
+The `uax_url_email` tokenizer accepts the following parameters:
+
+[horizontal]
+`max_token_length`::
+
+    The maximum token length. If a token is seen that exceeds this length then
+    it is split at `max_token_length` intervals. Defaults to `255`.
+
+[float]
+=== Example configuration
+
+In this example, we configure the `uax_url_email` tokenizer to have a
+`max_token_length` of 5 (for demonstration purposes):
+
+[source,js]
+----------------------------
+PUT my_index
+{
+  "settings": {
+    "analysis": {
+      "analyzer": {
+        "my_analyzer": {
+          "tokenizer": "my_tokenizer"
+        }
+      },
+      "tokenizer": {
+        "my_tokenizer": {
+          "type": "uax_url_email",
+          "max_token_length": 5
+        }
+      }
+    }
+  }
+}
+
+GET _cluster/health?wait_for_status=yellow
+
+POST my_index/_analyze
+{
+  "analyzer": "my_analyzer",
+  "text": "john.smith@global-international.com"
+}
+----------------------------
+// CONSOLE
+
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "john",
+      "start_offset": 0,
+      "end_offset": 4,
+      "type": "<ALPHANUM>",
+      "position": 0
+    },
+    {
+      "token": "smith",
+      "start_offset": 5,
+      "end_offset": 10,
+      "type": "<ALPHANUM>",
+      "position": 1
+    },
+    {
+      "token": "globa",
+      "start_offset": 11,
+      "end_offset": 16,
+      "type": "<ALPHANUM>",
+      "position": 2
+    },
+    {
+      "token": "l",
+      "start_offset": 16,
+      "end_offset": 17,
+      "type": "<ALPHANUM>",
+      "position": 3
+    },
+    {
+      "token": "inter",
+      "start_offset": 18,
+      "end_offset": 23,
+      "type": "<ALPHANUM>",
+      "position": 4
+    },
+    {
+      "token": "natio",
+      "start_offset": 23,
+      "end_offset": 28,
+      "type": "<ALPHANUM>",
+      "position": 5
+    },
+    {
+      "token": "nal.c",
+      "start_offset": 28,
+      "end_offset": 33,
+      "type": "<ALPHANUM>",
+      "position": 6
+    },
+    {
+      "token": "om",
+      "start_offset": 33,
+      "end_offset": 35,
+      "type": "<ALPHANUM>",
+      "position": 7
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+
+The above example produces the following terms:
+
+[source,text]
+---------------------------
+[ john, smith, globa, l, inter, natio, nal.c, om ]
+---------------------------
diff --git a/docs/reference/analysis/tokenizers/whitespace-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/whitespace-tokenizer.asciidoc
index f0e1ce28a12..9d06ea28d55 100644
--- a/docs/reference/analysis/tokenizers/whitespace-tokenizer.asciidoc
+++ b/docs/reference/analysis/tokenizers/whitespace-tokenizer.asciidoc
@@ -1,4 +1,114 @@
 [[analysis-whitespace-tokenizer]]
 === Whitespace Tokenizer
 
-A tokenizer of type `whitespace` that divides text at whitespace.
+The `whitespace` tokenizer breaks text into terms whenever it encounters a
+whitespace character.
+
+[float]
+=== Example output
+
+[source,js]
+---------------------------
+POST _analyze
+{
+  "tokenizer": "whitespace",
+  "text": "The 2 QUICK Brown-Foxes jumped over the lazy dog's bone."
+}
+---------------------------
+// CONSOLE
+
+/////////////////////
+
+[source,js]
+----------------------------
+{
+  "tokens": [
+    {
+      "token": "The",
+      "start_offset": 0,
+      "end_offset": 3,
+      "type": "word",
+      "position": 0
+    },
+    {
+      "token": "2",
+      "start_offset": 4,
+      "end_offset": 5,
+      "type": "word",
+      "position": 1
+    },
+    {
+      "token": "QUICK",
+      "start_offset": 6,
+      "end_offset": 11,
+      "type": "word",
+      "position": 2
+    },
+    {
+      "token": "Brown-Foxes",
+      "start_offset": 12,
+      "end_offset": 23,
+      "type": "word",
+      "position": 3
+    },
+    {
+      "token": "jumped",
+      "start_offset": 24,
+      "end_offset": 30,
+      "type": "word",
+      "position": 4
+    },
+    {
+      "token": "over",
+      "start_offset": 31,
+      "end_offset": 35,
+      "type": "word",
+      "position": 5
+    },
+    {
+      "token": "the",
+      "start_offset": 36,
+      "end_offset": 39,
+      "type": "word",
+      "position": 6
+    },
+    {
+      "token": "lazy",
+      "start_offset": 40,
+      "end_offset": 44,
+      "type": "word",
+      "position": 7
+    },
+    {
+      "token": "dog's",
+      "start_offset": 45,
+      "end_offset": 50,
+      "type": "word",
+      "position": 8
+    },
+    {
+      "token": "bone.",
+      "start_offset": 51,
+      "end_offset": 56,
+      "type": "word",
+      "position": 9
+    }
+  ]
+}
+----------------------------
+// TESTRESPONSE
+
+/////////////////////
+
+
+The above sentence would produce the following terms:
+
+[source,text]
+---------------------------
+[ The, 2, QUICK, Brown-Foxes, jumped, over, the, lazy, dog's, bone. ]
+---------------------------
+
+[float]
+=== Configuration
+
+The `whitespace` tokenizer is not configurable.
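+
+Although the tokenizer itself takes no parameters, it can still be combined
+with token filters in a custom analyzer. The following is a minimal sketch;
+the index name `my_index` and the analyzer name `my_analyzer` are
+placeholders for this example:
+
+[source,js]
+----------------------------
+PUT my_index
+{
+  "settings": {
+    "analysis": {
+      "analyzer": {
+        "my_analyzer": {
+          "tokenizer": "whitespace",
+          "filter": [ "lowercase" ]
+        }
+      }
+    }
+  }
+}
+----------------------------
+// CONSOLE
+
+With this analyzer the text is still split on whitespace only, but each
+resulting term is lowercased by the `lowercase` token filter.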
diff --git a/docs/reference/cluster/allocation-explain.asciidoc b/docs/reference/cluster/allocation-explain.asciidoc index 7a8a3e4a8ce..3b07b40d9e7 100644 --- a/docs/reference/cluster/allocation-explain.asciidoc +++ b/docs/reference/cluster/allocation-explain.asciidoc @@ -30,25 +30,26 @@ The response looks like: "primary" : false }, "assigned" : false, <1> + "shard_state_fetch_pending": false, <2> "unassigned_info" : { - "reason" : "INDEX_CREATED", <2> + "reason" : "INDEX_CREATED", <3> "at" : "2016-03-22T20:04:23.620Z" }, - "allocation_delay_ms" : 0, <3> - "remaining_delay_ms" : 0, <4> + "allocation_delay_ms" : 0, <4> + "remaining_delay_ms" : 0, <5> "nodes" : { "V-Spi0AyRZ6ZvKbaI3691w" : { "node_name" : "node1", - "node_attributes" : { <5> + "node_attributes" : { <6> "bar" : "baz" }, "store" : { - "shard_copy" : "NONE" <6> + "shard_copy" : "NONE" <7> }, - "final_decision" : "NO", <7> + "final_decision" : "NO", <8> "final_explanation" : "the shard cannot be assigned because one or more allocation decider returns a 'NO' decision", - "weight" : 0.06666675, <8> - "decisions" : [ { <9> + "weight" : 0.06666675, <9> + "decisions" : [ { <10> "decider" : "filter", "decision" : "NO", "explanation" : "node does not match index include filters [foo:\"bar\"]" @@ -91,14 +92,15 @@ The response looks like: } -------------------------------------------------- <1> Whether the shard is assigned or unassigned -<2> Reason for the shard originally becoming unassigned -<3> Configured delay before the shard can be allocated -<4> Remaining delay before the shard can be allocated -<5> User-added attributes the node has -<6> The shard copy information for this node and error (if applicable) -<7> Final decision and explanation of whether the shard can be allocated to this node -<8> Weight for how much the allocator would like to allocate the shard to this node -<9> List of node decisions factoring into final decision about the shard +<2> Whether information about the shard is still being fetched +<3> Reason for the shard originally becoming unassigned +<4> Configured delay before the shard can be allocated +<5> Remaining delay before the shard can be allocated +<6> User-added attributes the node has +<7> The shard copy information for this node and error (if applicable) +<8> Final decision and explanation of whether the shard can be allocated to this node +<9> Weight for how much the allocator would like to allocate the shard to this node +<10> List of node decisions factoring into final decision about the shard For a shard that is already assigned, the output looks similar to: @@ -113,6 +115,7 @@ For a shard that is already assigned, the output looks similar to: }, "assigned" : true, "assigned_node_id" : "Qc6VL8c5RWaw1qXZ0Rg57g", <1> + "shard_state_fetch_pending": false, "allocation_delay_ms" : 0, "remaining_delay_ms" : 0, "nodes" : { diff --git a/docs/reference/cluster/reroute.asciidoc b/docs/reference/cluster/reroute.asciidoc index 99e754df529..bb48a00fbe5 100644 --- a/docs/reference/cluster/reroute.asciidoc +++ b/docs/reference/cluster/reroute.asciidoc @@ -103,3 +103,16 @@ are available: To ensure that these implications are well-understood, this command requires the special field `accept_data_loss` to be explicitly set to `true` for it to work. + +[float] +=== Retry failed shards + +The cluster will attempt to allocate a shard a maximum of +`index.allocation.max_retries` times in a row (defaults to `5`), before giving +up and leaving the shard unallocated. 
This scenario can be caused by
+structural problems such as having an analyzer which refers to a stopwords
+file which doesn't exist on all nodes.
+
+Once the problem has been corrected, allocation can be manually retried by
+calling the <<cluster-reroute,reroute>> API with `?retry_failed`, which
+will attempt a single retry round for these shards.
\ No newline at end of file
diff --git a/docs/reference/docs/delete.asciidoc b/docs/reference/docs/delete.asciidoc
index f36ffe7abf9..175c07d005e 100644
--- a/docs/reference/docs/delete.asciidoc
+++ b/docs/reference/docs/delete.asciidoc
@@ -70,8 +70,8 @@ setting the routing parameter.
 
 Note that deleting a parent document does not automatically delete its
 children. One way of deleting all child documents given a parent's id is
-to use the `delete-by-query` plugin to perform a delete on the child
-index with the automatically generated (and indexed)
+to use the <<docs-delete-by-query,delete by query API>> to perform a delete
+on the child index with the automatically generated (and indexed)
 field _parent, which is in the format parent_type#parent_id.
 
 [float]
diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc
index 977cb4e5a1d..132b287bd46 100755
--- a/docs/reference/getting-started.asciidoc
+++ b/docs/reference/getting-started.asciidoc
@@ -163,7 +163,7 @@ As mentioned previously, we can override either the cluster or node name. This c
 
 [source,sh]
 --------------------------------------------------
-./elasticsearch -Ees.cluster.name=my_cluster_name -Ees.node.name=my_node_name
+./elasticsearch -Ecluster.name=my_cluster_name -Enode.name=my_node_name
 --------------------------------------------------
 
 Also note the line marked http with information about the HTTP address (`192.168.8.112`) and port (`9200`) that our node is reachable from. By default, Elasticsearch uses port `9200` to provide access to its REST API. This port is configurable if necessary.
diff --git a/docs/reference/index-modules/allocation/filtering.asciidoc b/docs/reference/index-modules/allocation/filtering.asciidoc
index be45cd2a1ac..05007b46188 100644
--- a/docs/reference/index-modules/allocation/filtering.asciidoc
+++ b/docs/reference/index-modules/allocation/filtering.asciidoc
@@ -14,7 +14,7 @@ attribute as follows:
 
 [source,sh]
 ------------------------
-bin/elasticsearch -Ees.node.attr.rack=rack1 -Ees.node.attr.size=big <1>
+bin/elasticsearch -Enode.attr.rack=rack1 -Enode.attr.size=big <1>
 ------------------------
 <1> These attribute settings can also be specified in the `elasticsearch.yml` config file.
diff --git a/docs/reference/index-modules/allocation/prioritization.asciidoc b/docs/reference/index-modules/allocation/prioritization.asciidoc
index eecc98abcb0..92051cc4dbc 100644
--- a/docs/reference/index-modules/allocation/prioritization.asciidoc
+++ b/docs/reference/index-modules/allocation/prioritization.asciidoc
@@ -13,7 +13,7 @@ This means that, by default, newer indices will be recovered before older indice
 
 Use the per-index dynamically updateable `index.priority` setting to customise
 the index prioritization order.
For instance:
 
-[source,json]
+[source,js]
 ------------------------------
 
 PUT index_1
@@ -45,7 +45,7 @@ In the above example:
 
 This setting accepts an integer, and can be updated on a
 live index with the <<indices-update-settings,update index settings API>>:
 
-[source,json]
+[source,js]
 ------------------------------
 PUT index_4/_settings
 {
diff --git a/docs/reference/index-modules/similarity.asciidoc b/docs/reference/index-modules/similarity.asciidoc
index 07591dc277b..8173b98d505 100644
--- a/docs/reference/index-modules/similarity.asciidoc
+++ b/docs/reference/index-modules/similarity.asciidoc
@@ -118,6 +118,8 @@ This similarity has the following options:
 [horizontal]
 `independence_measure`:: Possible values `standardized`, `saturated`, `chisquared`.
 
+Type name: `DFI`
+
 [float]
 [[ib]]
 ==== IB similarity.
diff --git a/docs/reference/mapping/params/format.asciidoc b/docs/reference/mapping/params/format.asciidoc
index 38cb0ea624f..0553c302b54 100644
--- a/docs/reference/mapping/params/format.asciidoc
+++ b/docs/reference/mapping/params/format.asciidoc
@@ -57,16 +57,14 @@ The following tables lists all the defaults ISO formats supported:
 `epoch_millis`::
 
   A formatter for the number of milliseconds since the epoch. Note, that
-  this timestamp allows a max length of 13 chars, so only dates between 1653
-  and 2286 are supported. You should use a different date formatter in
-  that case.
+  this timestamp is subject to the limits of Java's `Long.MIN_VALUE` and
+  `Long.MAX_VALUE`.
 
 `epoch_second`::
 
   A formatter for the number of seconds since the epoch. Note, that this
-  timestamp allows a max length of 10 chars, so only dates between 1653 and
-  2286 are supported. You should use a different date formatter in that
-  case.
+  timestamp is subject to the limits of Java's `Long.MIN_VALUE` and
+  `Long.MAX_VALUE` divided by 1000 (the number of milliseconds in a second).
 
 [[strict-date-time]]`date_optional_time` or `strict_date_optional_time`::
diff --git a/docs/reference/migration/migrate_5_0/allocation.asciidoc b/docs/reference/migration/migrate_5_0/allocation.asciidoc
index 1e095831381..d7affd8ff95 100644
--- a/docs/reference/migration/migrate_5_0/allocation.asciidoc
+++ b/docs/reference/migration/migrate_5_0/allocation.asciidoc
@@ -39,6 +39,11 @@ command corresponds to the old `allocate` command with `allow_primary` set to
 false. The new `allocate_empty_primary` command corresponds to the old
 `allocate` command with `allow_primary` set to true.
 
+==== Custom Reroute Commands
+
+Elasticsearch no longer supports plugins registering custom allocation
+commands. This extension point was unused and hopefully unneeded.
+
 ==== `index.shared_filesystem.recover_on_any_node` changes
 
 The behavior of `index.shared_filesystem.recover_on_any_node: true` has been
diff --git a/docs/reference/migration/migrate_5_0/java.asciidoc b/docs/reference/migration/migrate_5_0/java.asciidoc
index cdc471d19f2..da97d360b43 100644
--- a/docs/reference/migration/migrate_5_0/java.asciidoc
+++ b/docs/reference/migration/migrate_5_0/java.asciidoc
@@ -84,12 +84,14 @@ static factory methods in QueryBuilders accordingly.
 
 Making sure that query contains at least one clause by making initial clause
 mandatory in constructor.
+The method for adding clauses has been renamed from `clause(SpanQueryBuilder)`
+to `addClause(SpanQueryBuilder)`.
 
 ===== SpanNearQueryBuilder
 
 Removed setter for mandatory slop parameter, needs to be set in constructor
 now. Also making sure that query contains at least one clause by making
 initial clause mandatory in constructor. Updated the static factory methods in
 QueryBuilders accordingly.
+The method for adding clauses has been renamed from `clause(SpanQueryBuilder)`
+to `addClause(SpanQueryBuilder)`.
 
 ===== SpanNotQueryBuilder
diff --git a/docs/reference/migration/migrate_5_0/mapping.asciidoc b/docs/reference/migration/migrate_5_0/mapping.asciidoc
index 233ef84ae09..84dea729ec1 100644
--- a/docs/reference/migration/migrate_5_0/mapping.asciidoc
+++ b/docs/reference/migration/migrate_5_0/mapping.asciidoc
@@ -20,7 +20,7 @@ values. For backwards compatibility purposes, during the 5.x series:
 
 String mappings now have the following default mappings:
 
-[source,json]
+[source,js]
 ---------------
 {
   "type": "text",
@@ -135,7 +135,7 @@ will reject this option.
 Core types no longer support the object notation, which was used to provide
 per document boosts as follows:
 
-[source,json]
+[source,js]
 ---------------
 {
   "value": "field_value",
diff --git a/docs/reference/migration/migrate_5_0/packaging.asciidoc b/docs/reference/migration/migrate_5_0/packaging.asciidoc
index 5911b964b6b..977e20a76b1 100644
--- a/docs/reference/migration/migrate_5_0/packaging.asciidoc
+++ b/docs/reference/migration/migrate_5_0/packaging.asciidoc
@@ -43,3 +43,15 @@ Previously, the scripts used to start Elasticsearch and run plugin
 commands only required a Bourne-compatible shell. Starting in
 Elasticsearch 5.0.0, the bash shell is now required and `/bin/bash` is a
 hard-dependency for the RPM and Debian packages.
+
+==== Environmental Settings
+
+Previously, Elasticsearch could be configured via environment variables
+in two ways: first by using the placeholder syntax
+`${env.ENV_VAR_NAME}` and second by using the same syntax without
+the `env` prefix: `${ENV_VAR_NAME}`. The first method has been removed
+from Elasticsearch.
+
+Additionally, it was previously possible to set any setting in
+Elasticsearch via JVM system properties. This has been removed from
+Elasticsearch.
diff --git a/docs/reference/migration/migrate_5_0/percolator.asciidoc b/docs/reference/migration/migrate_5_0/percolator.asciidoc
index 7a3fa97fe92..ae2057bddfb 100644
--- a/docs/reference/migration/migrate_5_0/percolator.asciidoc
+++ b/docs/reference/migration/migrate_5_0/percolator.asciidoc
@@ -46,4 +46,61 @@ the existing document.
 
 ==== Percolate Stats
 
-Percolate stats have been replaced with `percolate` query cache stats in nodes stats and cluster stats APIs.
\ No newline at end of file
+The percolate stats have been removed. This is because the percolator no longer caches the percolator queries.
+
+==== Java client
+
+The percolator is no longer part of the core Elasticsearch dependency. It has moved to the percolator module.
+Therefore, when using the percolator feature from the Java client, the new percolator module should also be on the
+classpath. Also, the transport client should load the percolator module as a plugin:
+
+[source,java]
+--------------------------------------------------
+TransportClient transportClient = TransportClient.builder()
+        .settings(Settings.builder().put("node.name", "node"))
+        .addPlugin(PercolatorPlugin.class)
+        .build();
+transportClient.addTransportAddress(
+        new InetSocketTransportAddress(new InetSocketAddress(InetAddresses.forString("127.0.0.1"), 9300))
+);
+--------------------------------------------------
+
+The percolator and multi percolate related methods from the `Client` interface have been removed. These APIs have been
+deprecated and it is recommended to use the `percolate` query in either the search or multi search APIs. However, the
+percolate and multi percolate APIs can still be used from the Java client.
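+
+For comparison, a `percolate` query over REST looks roughly as follows; the
+index name, mapping type and field names below are only placeholders for
+this sketch:
+
+[source,js]
+--------------------------------------------------
+GET /my-percolator-index/_search
+{
+  "query": {
+    "percolate": {
+      "field": "query",
+      "document_type": "doc",
+      "document": {
+        "message": "A new bonsai tree in the office"
+      }
+    }
+  }
+}
+--------------------------------------------------
+
+Here `field` names the field that holds the registered percolator queries and
+`document` is the document to percolate against them.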
+
+Using percolate request:
+
+[source,java]
+--------------------------------------------------
+PercolateRequest request = new PercolateRequest();
+// set stuff and then execute:
+PercolateResponse response = transportClient.execute(PercolateAction.INSTANCE, request).actionGet();
+--------------------------------------------------
+
+Using percolate request builder:
+
+[source,java]
+--------------------------------------------------
+PercolateRequestBuilder builder = new PercolateRequestBuilder(transportClient, PercolateAction.INSTANCE);
+// set stuff and then execute:
+PercolateResponse response = builder.get();
+--------------------------------------------------
+
+Using multi percolate request:
+
+[source,java]
+--------------------------------------------------
+MultiPercolateRequest request = new MultiPercolateRequest();
+// set stuff and then execute:
+MultiPercolateResponse response = transportClient.execute(MultiPercolateAction.INSTANCE, request).get();
+--------------------------------------------------
+
+Using multi percolate request builder:
+
+[source,java]
+--------------------------------------------------
+MultiPercolateRequestBuilder builder = new MultiPercolateRequestBuilder(transportClient, MultiPercolateAction.INSTANCE);
+// set stuff and then execute:
+MultiPercolateResponse response = builder.get();
+--------------------------------------------------
\ No newline at end of file
diff --git a/docs/reference/migration/migrate_5_0/plugins.asciidoc b/docs/reference/migration/migrate_5_0/plugins.asciidoc
index 1ed45b9b596..ae1113caa48 100644
--- a/docs/reference/migration/migrate_5_0/plugins.asciidoc
+++ b/docs/reference/migration/migrate_5_0/plugins.asciidoc
@@ -95,6 +95,11 @@ cloud:
 
 Cloud GCE plugin has been renamed to {plugins}/discovery-gce.html[Discovery GCE plugin].
 
+==== Delete-By-Query plugin removed
+
+The Delete-By-Query plugin has been removed in favor of a new <<docs-delete-by-query,delete by query API>>
+implementation in core. It now supports throttling, retries and cancellation but no longer supports timeouts.
+Instead use the <<tasks,task management API>> to cancel deletes that run too long.
 
 ==== Mapper Attachments plugin deprecated
diff --git a/docs/reference/migration/migrate_5_0/rest.asciidoc b/docs/reference/migration/migrate_5_0/rest.asciidoc
index 200d76e45ca..23928551f77 100644
--- a/docs/reference/migration/migrate_5_0/rest.asciidoc
+++ b/docs/reference/migration/migrate_5_0/rest.asciidoc
@@ -57,3 +57,8 @@ removed in Elasticsearch 6.0.0.
 
 The deprecated `filters`/`token_filters`/`char_filters` parameter has been
 renamed `filter`/`token_filter`/`char_filter`.
+
+==== `DELETE /_query` endpoint removed
+
+The `DELETE /_query` endpoint provided by the Delete-By-Query plugin has been
+removed and replaced by the <<docs-delete-by-query,delete by query API>>.
diff --git a/docs/reference/migration/migrate_5_0/settings.asciidoc b/docs/reference/migration/migrate_5_0/settings.asciidoc
index 85895d65b67..0fa7d42e874 100644
--- a/docs/reference/migration/migrate_5_0/settings.asciidoc
+++ b/docs/reference/migration/migrate_5_0/settings.asciidoc
@@ -202,19 +202,14 @@ the cache implementation used for the request cache and the field data cache.
 
 ==== Using system properties to configure Elasticsearch
 
-Elasticsearch can be configured by setting system properties on the
-command line via `-Des.name.of.property=value.of.property`. This will be
-removed in a future version of Elasticsearch. Instead, use
-`-E es.name.of.setting=value.of.setting`. Note that in all cases the
-name of the setting must be prefixed with `es.`.
+Elasticsearch can no longer be configured by setting system properties. +Instead, use `-Ename.of.setting=value.of.setting`. ==== Removed using double-dashes to configure Elasticsearch Elasticsearch could previously be configured on the command line by setting settings via `--name.of.setting value.of.setting`. This feature -has been removed. Instead, use -`-Ees.name.of.setting=value.of.setting`. Note that in all cases the -name of the setting must be prefixed with `es.`. +has been removed. Instead, use `-Ename.of.setting=value.of.setting`. ==== Discovery Settings diff --git a/docs/reference/modules/cluster/allocation_awareness.asciidoc b/docs/reference/modules/cluster/allocation_awareness.asciidoc index 383252e23b3..f4e61fb0da1 100644 --- a/docs/reference/modules/cluster/allocation_awareness.asciidoc +++ b/docs/reference/modules/cluster/allocation_awareness.asciidoc @@ -21,7 +21,7 @@ attribute called `rack_id` -- we could use any attribute name. For example: [source,sh] ---------------------- -./bin/elasticsearch -Ees.node.attr.rack_id=rack_one <1> +./bin/elasticsearch -Enode.attr.rack_id=rack_one <1> ---------------------- <1> This setting could also be specified in the `elasticsearch.yml` config file. diff --git a/docs/reference/modules/node.asciidoc b/docs/reference/modules/node.asciidoc index 124d68f1d6d..2f1caa42ad8 100644 --- a/docs/reference/modules/node.asciidoc +++ b/docs/reference/modules/node.asciidoc @@ -265,7 +265,7 @@ Like all node settings, it can also be specified on the command line as: [source,sh] ----------------------- -./bin/elasticsearch -Ees.path.data=/var/elasticsearch/data +./bin/elasticsearch -Epath.data=/var/elasticsearch/data ----------------------- TIP: When using the `.zip` or `.tar.gz` distributions, the `path.data` setting diff --git a/docs/reference/modules/scripting/security.asciidoc b/docs/reference/modules/scripting/security.asciidoc index 89065d86b5f..4d35b0da250 100644 --- a/docs/reference/modules/scripting/security.asciidoc +++ b/docs/reference/modules/scripting/security.asciidoc @@ -165,7 +165,7 @@ https://github.com/elastic/elasticsearch/blob/{branch}/core/src/main/java/org/el In a script, attempting to load a class that does not appear in the whitelist _may_ result in a `ClassNotFoundException`, for instance this script: -[source,json] +[source,js] ------------------------------ GET _search { @@ -179,7 +179,7 @@ GET _search will return the following exception: -[source,json] +[source,js] ------------------------------ { "reason": { @@ -207,7 +207,7 @@ use(groovy.time.TimeCategory); new Date(123456789).format('HH') Returns the following exception: -[source,json] +[source,js] ------------------------------ { "reason": { diff --git a/docs/reference/query-dsl/bool-query.asciidoc b/docs/reference/query-dsl/bool-query.asciidoc index 8fcd66129cd..2a289910c43 100644 --- a/docs/reference/query-dsl/bool-query.asciidoc +++ b/docs/reference/query-dsl/bool-query.asciidoc @@ -81,7 +81,7 @@ all documents where the `status` field contains the term `active`. This first query assigns a score of `0` to all documents, as no scoring query has been specified: -[source,json] +[source,js] --------------------------------- GET _search { @@ -101,7 +101,7 @@ GET _search This `bool` query has a `match_all` query, which assigns a score of `1.0` to all documents. 
-[source,json]
+[source,js]
 ---------------------------------
 GET _search
 {
@@ -125,7 +125,7 @@ This `constant_score` query behaves in exactly the same way as the second exampl
 The `constant_score` query assigns a score of `1.0` to all documents matched
 by the filter.
 
-[source,json]
+[source,js]
 ---------------------------------
 GET _search
 {
diff --git a/docs/reference/query-dsl/boosting-query.asciidoc b/docs/reference/query-dsl/boosting-query.asciidoc
index 969b3bbedfe..5bb07392ab7 100644
--- a/docs/reference/query-dsl/boosting-query.asciidoc
+++ b/docs/reference/query-dsl/boosting-query.asciidoc
@@ -8,19 +8,23 @@ overall score.
 
 [source,js]
 --------------------------------------------------
+GET /_search
 {
-    "boosting" : {
-        "positive" : {
-            "term" : {
-                "field1" : "value1"
-            }
-        },
-        "negative" : {
-            "term" : {
-                "field2" : "value2"
-            }
-        },
-        "negative_boost" : 0.2
+    "query": {
+        "boosting" : {
+            "positive" : {
+                "term" : {
+                    "field1" : "value1"
+                }
+            },
+            "negative" : {
+                "term" : {
+                    "field2" : "value2"
+                }
+            },
+            "negative_boost" : 0.2
+        }
     }
 }
 --------------------------------------------------
+// CONSOLE
diff --git a/docs/reference/query-dsl/common-terms-query.asciidoc b/docs/reference/query-dsl/common-terms-query.asciidoc
index a956c33c1ee..fcc4ace2ec6 100644
--- a/docs/reference/query-dsl/common-terms-query.asciidoc
+++ b/docs/reference/query-dsl/common-terms-query.asciidoc
@@ -70,15 +70,19 @@ In this example, words that have a document frequency greater than 0.1%
 
 [source,js]
 --------------------------------------------------
+GET /_search
 {
-    "common": {
-        "body": {
-            "query": "this is bonsai cool",
-            "cutoff_frequency": 0.001
+    "query": {
+        "common": {
+            "body": {
+                "query": "this is bonsai cool",
+                "cutoff_frequency": 0.001
+            }
+        }
     }
-    }
 }
 --------------------------------------------------
+// CONSOLE
 
 The number of terms which should match can be controlled with the
 <<query-dsl-minimum-should-match,minimum_should_match>>
 
@@ -90,36 +94,44 @@ all terms required:
 
 [source,js]
 --------------------------------------------------
+GET /_search
 {
-    "common": {
-        "body": {
-            "query": "nelly the elephant as a cartoon",
-            "cutoff_frequency": 0.001,
-            "low_freq_operator": "and"
+    "query": {
+        "common": {
+            "body": {
+                "query": "nelly the elephant as a cartoon",
+                "cutoff_frequency": 0.001,
+                "low_freq_operator": "and"
+            }
+        }
     }
-    }
 }
 --------------------------------------------------
+// CONSOLE
 
 which is roughly equivalent to:
 
 [source,js]
 --------------------------------------------------
+GET /_search
 {
-    "bool": {
-        "must": [
-        { "term": { "body": "nelly"}},
-        { "term": { "body": "elephant"}},
-        { "term": { "body": "cartoon"}}
-        ],
-        "should": [
-        { "term": { "body": "the"}}
-        { "term": { "body": "as"}}
-        { "term": { "body": "a"}}
-        ]
-    }
+    "query": {
+        "bool": {
+            "must": [
+            { "term": { "body": "nelly"}},
+            { "term": { "body": "elephant"}},
+            { "term": { "body": "cartoon"}}
+            ],
+            "should": [
+            { "term": { "body": "the"}},
+            { "term": { "body": "as"}},
+            { "term": { "body": "a"}}
+            ]
+        }
+    }
 }
 --------------------------------------------------
+// CONSOLE
 
 Alternatively use
 <<query-dsl-minimum-should-match,minimum_should_match>>
 
@@ -128,41 +140,49 @@ must be present, for instance:
 
 [source,js]
 --------------------------------------------------
+GET /_search
 {
-    "common": {
-        "body": {
-            "query": "nelly the elephant as a cartoon",
-            "cutoff_frequency": 0.001,
-            "minimum_should_match": 2
+    "query": {
+        "common": {
+            "body": {
+                "query": "nelly the elephant as a cartoon",
+                "cutoff_frequency": 0.001,
+                "minimum_should_match": 2
+            }
+        }
     }
-    }
 }
 --------------------------------------------------
+// CONSOLE which is roughly equivalent to: [source,js] -------------------------------------------------- +GET /_search { - "bool": { - "must": { - "bool": { - "should": [ - { "term": { "body": "nelly"}}, - { "term": { "body": "elephant"}}, - { "term": { "body": "cartoon"}} - ], - "minimum_should_match": 2 - } - }, - "should": [ - { "term": { "body": "the"}} - { "term": { "body": "as"}} - { "term": { "body": "a"}} - ] - } + "query": { + "bool": { + "must": { + "bool": { + "should": [ + { "term": { "body": "nelly"}}, + { "term": { "body": "elephant"}}, + { "term": { "body": "cartoon"}} + ], + "minimum_should_match": 2 + } + }, + "should": [ + { "term": { "body": "the"}}, + { "term": { "body": "as"}}, + { "term": { "body": "a"}} + ] + } + } } -------------------------------------------------- +// CONSOLE minimum_should_match @@ -174,50 +194,58 @@ additional parameters (note the change in structure): [source,js] -------------------------------------------------- +GET /_search { - "common": { - "body": { - "query": "nelly the elephant not as a cartoon", - "cutoff_frequency": 0.001, - "minimum_should_match": { - "low_freq" : 2, - "high_freq" : 3 - } + "query": { + "common": { + "body": { + "query": "nelly the elephant not as a cartoon", + "cutoff_frequency": 0.001, + "minimum_should_match": { + "low_freq" : 2, + "high_freq" : 3 + } + } + } } - } } -------------------------------------------------- +// CONSOLE which is roughly equivalent to: [source,js] -------------------------------------------------- +GET /_search { - "bool": { - "must": { - "bool": { - "should": [ - { "term": { "body": "nelly"}}, - { "term": { "body": "elephant"}}, - { "term": { "body": "cartoon"}} - ], - "minimum_should_match": 2 - } - }, - "should": { - "bool": { - "should": [ - { "term": { "body": "the"}}, - { "term": { "body": "not"}}, - { "term": { "body": "as"}}, - { "term": { "body": "a"}} - ], - "minimum_should_match": 3 - } + "query": { + "bool": { + "must": { + "bool": { + "should": [ + { "term": { "body": "nelly"}}, + { "term": { "body": "elephant"}}, + { "term": { "body": "cartoon"}} + ], + "minimum_should_match": 2 + } + }, + "should": { + "bool": { + "should": [ + { "term": { "body": "the"}}, + { "term": { "body": "not"}}, + { "term": { "body": "as"}}, + { "term": { "body": "a"}} + ], + "minimum_should_match": 3 + } + } + } } - } } -------------------------------------------------- +// CONSOLE In this case it means the high frequency terms have only an impact on relevance when there are at least three of them. 
But the most @@ -227,36 +255,44 @@ for high frequency terms is when there are only high frequency terms: [source,js] -------------------------------------------------- +GET /_search { - "common": { - "body": { - "query": "how not to be", - "cutoff_frequency": 0.001, - "minimum_should_match": { - "low_freq" : 2, - "high_freq" : 3 - } + "query": { + "common": { + "body": { + "query": "how not to be", + "cutoff_frequency": 0.001, + "minimum_should_match": { + "low_freq" : 2, + "high_freq" : 3 + } + } + } } - } } -------------------------------------------------- +// CONSOLE which is roughly equivalent to: [source,js] -------------------------------------------------- +GET /_search { - "bool": { - "should": [ - { "term": { "body": "how"}}, - { "term": { "body": "not"}}, - { "term": { "body": "to"}}, - { "term": { "body": "be"}} - ], - "minimum_should_match": "3<50%" - } + "query": { + "bool": { + "should": [ + { "term": { "body": "how"}}, + { "term": { "body": "not"}}, + { "term": { "body": "to"}}, + { "term": { "body": "be"}} + ], + "minimum_should_match": "3<50%" + } + } } -------------------------------------------------- +// CONSOLE The high frequency generated query is then slightly less restrictive than with an `AND`. diff --git a/docs/reference/query-dsl/constant-score-query.asciidoc b/docs/reference/query-dsl/constant-score-query.asciidoc index 8e76ac13ff5..bced9fc9fbe 100644 --- a/docs/reference/query-dsl/constant-score-query.asciidoc +++ b/docs/reference/query-dsl/constant-score-query.asciidoc @@ -7,12 +7,16 @@ filter. Maps to Lucene `ConstantScoreQuery`. [source,js] -------------------------------------------------- +GET /_search { - "constant_score" : { - "filter" : { - "term" : { "user" : "kimchy"} - }, - "boost" : 1.2 + "query": { + "constant_score" : { + "filter" : { + "term" : { "user" : "kimchy"} + }, + "boost" : 1.2 + } } } -------------------------------------------------- +// CONSOLE diff --git a/docs/reference/query-dsl/dis-max-query.asciidoc b/docs/reference/query-dsl/dis-max-query.asciidoc index 2938c8db8ea..2f82f2294b3 100644 --- a/docs/reference/query-dsl/dis-max-query.asciidoc +++ b/docs/reference/query-dsl/dis-max-query.asciidoc @@ -27,18 +27,22 @@ This query maps to Lucene `DisjunctionMaxQuery`. 
[source,js] -------------------------------------------------- +GET /_search { - "dis_max" : { - "tie_breaker" : 0.7, - "boost" : 1.2, - "queries" : [ - { - "term" : { "age" : 34 } - }, - { - "term" : { "age" : 35 } - } - ] + "query": { + "dis_max" : { + "tie_breaker" : 0.7, + "boost" : 1.2, + "queries" : [ + { + "term" : { "age" : 34 } + }, + { + "term" : { "age" : 35 } + } + ] + } } } -------------------------------------------------- +// CONSOLE diff --git a/docs/reference/query-dsl/exists-query.asciidoc b/docs/reference/query-dsl/exists-query.asciidoc index b484d47f4b6..4971219366f 100644 --- a/docs/reference/query-dsl/exists-query.asciidoc +++ b/docs/reference/query-dsl/exists-query.asciidoc @@ -5,10 +5,14 @@ Returns documents that have at least one non-`null` value in the original field: [source,js] -------------------------------------------------- +GET /_search { - "exists" : { "field" : "user" } + "query": { + "exists" : { "field" : "user" } + } } -------------------------------------------------- +// CONSOLE For instance, these documents would all match the above query: @@ -77,14 +81,20 @@ clause as follows: [source,js] -------------------------------------------------- -"bool": { - "must_not": { - "exists": { - "field": "user" +GET /_search +{ + "query": { + "bool": { + "must_not": { + "exists": { + "field": "user" + } + } } } } -------------------------------------------------- +// CONSOLE This query returns documents that have no value in the user field. diff --git a/docs/reference/query-dsl/function-score-query.asciidoc b/docs/reference/query-dsl/function-score-query.asciidoc index 5f92d8c172c..e7e4b8f5877 100644 --- a/docs/reference/query-dsl/function-score-query.asciidoc +++ b/docs/reference/query-dsl/function-score-query.asciidoc @@ -14,13 +14,20 @@ by the query. [source,js] -------------------------------------------------- -"function_score": { - "query": {}, - "boost": "boost for the whole query", - "FUNCTION": {}, <1> - "boost_mode":"(multiply|replace|...)" +GET /_search +{ + "query": { + "function_score": { + "query": {}, + "boost": "5", + "random_score": {}, <1> + "boost_mode":"multiply" + } + } } -------------------------------------------------- +// CONSOLE + <1> See <> for a list of supported functions. Furthermore, several functions can be combined. In this case one can @@ -29,30 +36,35 @@ given filtering query [source,js] -------------------------------------------------- -"function_score": { - "query": {}, - "boost": "boost for the whole query", - "functions": [ - { - "filter": {}, - "FUNCTION": {}, <1> - "weight": number - }, - { - "FUNCTION": {} <1> - }, - { - "filter": {}, - "weight": number +GET /_search +{ + "query": { + "function_score": { + "query": {}, + "boost": "5", <1> + "functions": [ + { + "filter": {}, + "random_score": {}, <2> + "weight": 23 + }, + { + "filter": {}, + "weight": 42 + } + ], + "max_boost": 42, + "score_mode": "max", + "boost_mode": "multiply", + "min_score" : 42 } - ], - "max_boost": number, - "score_mode": "(multiply|max|...)", - "boost_mode": "(multiply|replace|...)", - "min_score" : number + } } -------------------------------------------------- -<1> See <> for a list of supported functions. +// CONSOLE + +<1> Boost for the whole query. +<2> See <> for a list of supported functions. NOTE: The scores produced by the filtering query of each function do not matter. 
@@ -459,36 +471,36 @@ the request would look like this: [source,js] -------------------------------------------------- -GET _search +GET /_search { - "query": { - "function_score": { - "functions": [ - { - "gauss": { - "price": { - "origin": "0", - "scale": "20" + "query": { + "function_score": { + "functions": [ + { + "gauss": { + "price": { + "origin": "0", + "scale": "20" + } + } + }, + { + "gauss": { + "location": { + "origin": "11, 12", + "scale": "2km" + } + } } - } - }, - { - "gauss": { - "location": { - "origin": "11, 12", - "scale": "2km" + ], + "query": { + "match": { + "properties": "balcony" } - } + }, + "score_mode": "multiply" } - ], - "query": { - "match": { - "properties": "balcony" - } - }, - "score_mode": "multiply" } - } } -------------------------------------------------- // CONSOLE diff --git a/docs/reference/query-dsl/fuzzy-query.asciidoc b/docs/reference/query-dsl/fuzzy-query.asciidoc index 48c9c45c0ff..f320e81b579 100644 --- a/docs/reference/query-dsl/fuzzy-query.asciidoc +++ b/docs/reference/query-dsl/fuzzy-query.asciidoc @@ -16,27 +16,35 @@ Here is a simple example: [source,js] -------------------------------------------------- +GET /_search { - "fuzzy" : { "user" : "ki" } + "query": { + "fuzzy" : { "user" : "ki" } + } } -------------------------------------------------- +// CONSOLE Or with more advanced settings: [source,js] -------------------------------------------------- +GET /_search { - "fuzzy" : { - "user" : { - "value" : "ki", - "boost" : 1.0, - "fuzziness" : 2, - "prefix_length" : 0, - "max_expansions": 100 + "query": { + "fuzzy" : { + "user" : { + "value" : "ki", + "boost" : 1.0, + "fuzziness" : 2, + "prefix_length" : 0, + "max_expansions": 100 + } } } } -------------------------------------------------- +// CONSOLE [float] ===== Parameters @@ -62,3 +70,4 @@ WARNING: This query can be very heavy if `prefix_length` is set to `0` and if `max_expansions` is set to a high number. It could result in every term in the index being examined! + diff --git a/docs/reference/query-dsl/geo-bounding-box-query.asciidoc b/docs/reference/query-dsl/geo-bounding-box-query.asciidoc index 00120a98fc9..40debb57105 100644 --- a/docs/reference/query-dsl/geo-bounding-box-query.asciidoc +++ b/docs/reference/query-dsl/geo-bounding-box-query.asciidoc @@ -6,6 +6,24 @@ bounding box. Assuming the following indexed document: [source,js] -------------------------------------------------- +PUT /my_locations +{ + "mappings": { + "location": { + "properties": { + "pin": { + "properties": { + "location": { + "type": "geo_point" + } + } + } + } + } + } +} + +PUT /my_locations/location/1 { "pin" : { "location" : { @@ -15,27 +33,32 @@ bounding box. 
Assuming the following indexed document: } } -------------------------------------------------- +// CONSOLE +// TESTSETUP Then the following simple query can be executed with a `geo_bounding_box` filter: [source,js] -------------------------------------------------- +GET /_search { - "bool" : { - "must" : { - "match_all" : {} - }, - "filter" : { - "geo_bounding_box" : { - "pin.location" : { - "top_left" : { - "lat" : 40.73, - "lon" : -74.1 - }, - "bottom_right" : { - "lat" : 40.01, - "lon" : -71.12 + "query": { + "bool" : { + "must" : { + "match_all" : {} + }, + "filter" : { + "geo_bounding_box" : { + "pin.location" : { + "top_left" : { + "lat" : 40.73, + "lon" : -74.1 + }, + "bottom_right" : { + "lat" : 40.01, + "lon" : -71.12 + } } } } @@ -43,6 +66,7 @@ Then the following simple query can be executed with a } } -------------------------------------------------- +// CONSOLE [float] ==== Query Options @@ -75,21 +99,24 @@ representation of the geo point, the filter can accept it as well: [source,js] -------------------------------------------------- +GET /_search { - "bool" : { - "must" : { - "match_all" : {} - }, - "filter" : { - "geo_bounding_box" : { - "pin.location" : { - "top_left" : { - "lat" : 40.73, - "lon" : -74.1 - }, - "bottom_right" : { - "lat" : 40.01, - "lon" : -71.12 + "query": { + "bool" : { + "must" : { + "match_all" : {} + }, + "filter" : { + "geo_bounding_box" : { + "pin.location" : { + "top_left" : { + "lat" : 40.73, + "lon" : -74.1 + }, + "bottom_right" : { + "lat" : 40.01, + "lon" : -71.12 + } } } } @@ -97,6 +124,7 @@ representation of the geo point, the filter can accept it as well: } } -------------------------------------------------- +// CONSOLE [float] ===== Lat Lon As Array @@ -106,22 +134,26 @@ conform with http://geojson.org/[GeoJSON]. [source,js] -------------------------------------------------- +GET /_search { - "bool" : { - "must" : { - "match_all" : {} - }, - "filter" : { - "geo_bounding_box" : { - "pin.location" : { - "top_left" : [-74.1, 40.73], - "bottom_right" : [-71.12, 40.01] + "query": { + "bool" : { + "must" : { + "match_all" : {} + }, + "filter" : { + "geo_bounding_box" : { + "pin.location" : { + "top_left" : [-74.1, 40.73], + "bottom_right" : [-71.12, 40.01] + } } } } } } -------------------------------------------------- +// CONSOLE [float] ===== Lat Lon As String @@ -130,44 +162,52 @@ Format in `lat,lon`. [source,js] -------------------------------------------------- +GET /_search { - "bool" : { - "must" : { - "match_all" : {} - }, - "filter" : { - "geo_bounding_box" : { - "pin.location" : { - "top_left" : "40.73, -74.1", - "bottom_right" : "40.01, -71.12" + "query": { + "bool" : { + "must" : { + "match_all" : {} + }, + "filter" : { + "geo_bounding_box" : { + "pin.location" : { + "top_left" : "40.73, -74.1", + "bottom_right" : "40.01, -71.12" + } } } - } } } +} -------------------------------------------------- +// CONSOLE [float] ===== Geohash [source,js] -------------------------------------------------- +GET /_search { - "bool" : { - "must" : { - "match_all" : {} - }, - "filter" : { - "geo_bounding_box" : { - "pin.location" : { - "top_left" : "dr5r9ydj2y73", - "bottom_right" : "drj7teegpus6" + "query": { + "bool" : { + "must" : { + "match_all" : {} + }, + "filter" : { + "geo_bounding_box" : { + "pin.location" : { + "top_left" : "dr5r9ydj2y73", + "bottom_right" : "drj7teegpus6" + } } } } } } -------------------------------------------------- +// CONSOLE [float] ==== Vertices @@ -181,24 +221,28 @@ values separately. 
[source,js] -------------------------------------------------- +GET /_search { - "bool" : { - "must" : { - "match_all" : {} - }, - "filter" : { - "geo_bounding_box" : { - "pin.location" : { - "top" : 40.73, - "left" : -74.1, - "bottom" : 40.01, - "right" : -71.12 + "query": { + "bool" : { + "must" : { + "match_all" : {} + }, + "filter" : { + "geo_bounding_box" : { + "pin.location" : { + "top" : 40.73, + "left" : -74.1, + "bottom" : 40.01, + "right" : -71.12 + } } } } } } -------------------------------------------------- +// CONSOLE [float] @@ -227,29 +271,33 @@ are not supported. Here is an example: [source,js] -------------------------------------------------- +GET /_search { - "bool" : { - "must" : { - "match_all" : {} - }, - "filter" : { - "geo_bounding_box" : { - "pin.location" : { - "top_left" : { - "lat" : 40.73, - "lon" : -74.1 + "query": { + "bool" : { + "must" : { + "match_all" : {} + }, + "filter" : { + "geo_bounding_box" : { + "pin.location" : { + "top_left" : { + "lat" : 40.73, + "lon" : -74.1 + }, + "bottom_right" : { + "lat" : 40.10, + "lon" : -71.12 + } }, - "bottom_right" : { - "lat" : 40.10, - "lon" : -71.12 - } - }, - "type" : "indexed" + "type" : "indexed" + } } } } } -------------------------------------------------- +// CONSOLE [float] ==== Ignore Unmapped diff --git a/docs/reference/query-dsl/geo-distance-query.asciidoc b/docs/reference/query-dsl/geo-distance-query.asciidoc index c29391e5b57..5e2f0ce43c7 100644 --- a/docs/reference/query-dsl/geo-distance-query.asciidoc +++ b/docs/reference/query-dsl/geo-distance-query.asciidoc @@ -2,10 +2,29 @@ === Geo Distance Query Filters documents that include only hits that exists within a specific -distance from a geo point. Assuming the following indexed json: +distance from a geo point. Assuming the following mapping and indexed +document: [source,js] -------------------------------------------------- +PUT /my_locations +{ + "mappings": { + "location": { + "properties": { + "pin": { + "properties": { + "location": { + "type": "geo_point" + } + } + } + } + } + } +} + +PUT /my_locations/location/1 { "pin" : { "location" : { @@ -15,29 +34,36 @@ distance from a geo point. 
Assuming the following indexed json: } } -------------------------------------------------- +// CONSOLE +// TESTSETUP + Then the following simple query can be executed with a `geo_distance` filter: [source,js] -------------------------------------------------- +GET /my_locations/location/_search { - "bool" : { - "must" : { - "match_all" : {} - }, - "filter" : { - "geo_distance" : { - "distance" : "200km", - "pin.location" : { - "lat" : 40, - "lon" : -70 + "query": { + "bool" : { + "must" : { + "match_all" : {} + }, + "filter" : { + "geo_distance" : { + "distance" : "200km", + "pin.location" : { + "lat" : 40, + "lon" : -70 + } } } } } } -------------------------------------------------- +// CONSOLE [float] ==== Accepted Formats @@ -50,23 +76,27 @@ representation of the geo point, the filter can accept it as well: [source,js] -------------------------------------------------- +GET /my_locations/location/_search { - "bool" : { - "must" : { - "match_all" : {} - }, - "filter" : { - "geo_distance" : { - "distance" : "12km", - "pin.location" : { - "lat" : 40, - "lon" : -70 + "query": { + "bool" : { + "must" : { + "match_all" : {} + }, + "filter" : { + "geo_distance" : { + "distance" : "12km", + "pin.location" : { + "lat" : 40, + "lon" : -70 + } } } } } } -------------------------------------------------- +// CONSOLE [float] ===== Lat Lon As Array @@ -76,20 +106,25 @@ conform with http://geojson.org/[GeoJSON]. [source,js] -------------------------------------------------- +GET /my_locations/location/_search { - "bool" : { - "must" : { - "match_all" : {} - }, - "filter" : { - "geo_distance" : { - "distance" : "12km", - "pin.location" : [-70, 40] + "query": { + "bool" : { + "must" : { + "match_all" : {} + }, + "filter" : { + "geo_distance" : { + "distance" : "12km", + "pin.location" : [-70, 40] + } } } } } -------------------------------------------------- +// CONSOLE + [float] ===== Lat Lon As String @@ -98,40 +133,48 @@ Format in `lat,lon`. 
[source,js] -------------------------------------------------- +GET /my_locations/location/_search { - "bool" : { - "must" : { - "match_all" : {} - }, - "filter" : { - "geo_distance" : { - "distance" : "12km", - "pin.location" : "40,-70" + "query": { + "bool" : { + "must" : { + "match_all" : {} + }, + "filter" : { + "geo_distance" : { + "distance" : "12km", + "pin.location" : "40,-70" + } } } } } -------------------------------------------------- +// CONSOLE [float] ===== Geohash [source,js] -------------------------------------------------- +GET /my_locations/location/_search { - "bool" : { - "must" : { - "match_all" : {} - }, - "filter" : { - "geo_distance" : { - "distance" : "12km", - "pin.location" : "drm3btev3e86" + "query": { + "bool" : { + "must" : { + "match_all" : {} + }, + "filter" : { + "geo_distance" : { + "distance" : "12km", + "pin.location" : "drm3btev3e86" + } } } } } -------------------------------------------------- +// CONSOLE [float] ==== Options diff --git a/docs/reference/query-dsl/geo-distance-range-query.asciidoc b/docs/reference/query-dsl/geo-distance-range-query.asciidoc index abf557d68c7..c54c50638bc 100644 --- a/docs/reference/query-dsl/geo-distance-range-query.asciidoc +++ b/docs/reference/query-dsl/geo-distance-range-query.asciidoc @@ -5,24 +5,28 @@ Filters documents that exists within a range from a specific point: [source,js] -------------------------------------------------- +GET /_search { - "bool" : { - "must" : { - "match_all" : {} - }, - "filter" : { - "geo_distance_range" : { - "from" : "200km", - "to" : "400km", - "pin.location" : { - "lat" : 40, - "lon" : -70 + "query": { + "bool" : { + "must" : { + "match_all" : {} + }, + "filter" : { + "geo_distance_range" : { + "from" : "200km", + "to" : "400km", + "pin.location" : { + "lat" : 40, + "lon" : -70 + } } } } } } -------------------------------------------------- +// CONSOLE Supports the same point location parameter and query options as the <> diff --git a/docs/reference/query-dsl/geo-polygon-query.asciidoc b/docs/reference/query-dsl/geo-polygon-query.asciidoc index 5717c5dc924..35278fda4a5 100644 --- a/docs/reference/query-dsl/geo-polygon-query.asciidoc +++ b/docs/reference/query-dsl/geo-polygon-query.asciidoc @@ -6,25 +6,29 @@ points. Here is an example: [source,js] -------------------------------------------------- +GET /_search { - "bool" : { - "query" : { - "match_all" : {} - }, - "filter" : { - "geo_polygon" : { - "person.location" : { - "points" : [ + "query": { + "bool" : { + "must" : { + "match_all" : {} + }, + "filter" : { + "geo_polygon" : { + "person.location" : { + "points" : [ {"lat" : 40, "lon" : -70}, {"lat" : 30, "lon" : -80}, {"lat" : 20, "lon" : -90} - ] + ] + } } } } } } -------------------------------------------------- +// CONSOLE [float] ==== Query Options @@ -53,25 +57,29 @@ conform with http://geojson.org/[GeoJSON]. [source,js] -------------------------------------------------- +GET /_search { - "bool" : { - "must" : { - "match_all" : {} - }, - "filter" : { - "geo_polygon" : { - "person.location" : { - "points" : [ - [-70, 40], + "query": { + "bool" : { + "must" : { + "match_all" : {} + }, + "filter" : { + "geo_polygon" : { + "person.location" : { + "points" : [ + [-70, 40], [-80, 30], - [-90, 20] - ] + [-90, 20] + ] + } } } } } } -------------------------------------------------- +// CONSOLE [float] ===== Lat Lon as String @@ -80,50 +88,58 @@ Format in `lat,lon`. 
[source,js] -------------------------------------------------- +GET /_search { - "bool" : { - "must" : { - "match_all" : {} - }, - "filter" : { - "geo_polygon" : { - "person.location" : { - "points" : [ - "40, -70", - "30, -80", - "20, -90" - ] + "query": { + "bool" : { + "must" : { + "match_all" : {} + }, + "filter" : { + "geo_polygon" : { + "person.location" : { + "points" : [ + "40, -70", + "30, -80", + "20, -90" + ] + } } } } } } -------------------------------------------------- +// CONSOLE [float] ===== Geohash [source,js] -------------------------------------------------- +GET /_search { - "bool" : { - "must" : { - "match_all" : {} - }, - "filter" : { - "geo_polygon" : { - "person.location" : { - "points" : [ - "drn5x1g8cu2y", - "30, -80", - "20, -90" - ] + "query": { + "bool" : { + "must" : { + "match_all" : {} + }, + "filter" : { + "geo_polygon" : { + "person.location" : { + "points" : [ + "drn5x1g8cu2y", + "30, -80", + "20, -90" + ] + } } } } } } -------------------------------------------------- +// CONSOLE [float] ==== geo_point Type diff --git a/docs/reference/query-dsl/geo-shape-query.asciidoc b/docs/reference/query-dsl/geo-shape-query.asciidoc index a892080dda4..12203061336 100644 --- a/docs/reference/query-dsl/geo-shape-query.asciidoc +++ b/docs/reference/query-dsl/geo-shape-query.asciidoc @@ -26,10 +26,10 @@ Given a document that looks like this: -------------------------------------------------- { "name": "Wind & Wetter, Berlin, Germany", - "location": { - "type": "Point", - "coordinates": [13.400544, 52.530286] - } + "location": { + "type": "Point", + "coordinates": [13.400544, 52.530286] + } } -------------------------------------------------- @@ -38,6 +38,7 @@ The following query will find the point using the Elasticsearch's [source,js] -------------------------------------------------- +GET /_search { "query":{ "bool": { @@ -59,6 +60,7 @@ The following query will find the point using the Elasticsearch's } } -------------------------------------------------- +// CONSOLE ==== Pre-Indexed Shape @@ -81,26 +83,30 @@ shape: [source,js] -------------------------------------------------- +GET /_search { - "bool": { - "must": { - "match_all": {} - }, - "filter": { - "geo_shape": { - "location": { - "indexed_shape": { - "id": "DEU", - "type": "countries", - "index": "shapes", - "path": "location" + "query": { + "bool": { + "must": { + "match_all": {} + }, + "filter": { + "geo_shape": { + "location": { + "indexed_shape": { + "id": "DEU", + "type": "countries", + "index": "shapes", + "path": "location" + } + } } } - } } } } -------------------------------------------------- +// CONSOLE ==== Spatial Relations diff --git a/docs/reference/query-dsl/geohash-cell-query.asciidoc b/docs/reference/query-dsl/geohash-cell-query.asciidoc index 2dd701fd958..27e6319bc71 100644 --- a/docs/reference/query-dsl/geohash-cell-query.asciidoc +++ b/docs/reference/query-dsl/geohash-cell-query.asciidoc @@ -13,6 +13,7 @@ setting the `geohash_prefix` option: [source,js] -------------------------------------------------- +PUT /my_index { "mappings" : { "location": { @@ -28,6 +29,8 @@ setting the `geohash_prefix` option: } } -------------------------------------------------- +// CONSOLE +// TESTSETUP The geohash cell can defined by all formats of `geo_points`. If such a cell is defined by a latitude and longitude pair the size of the cell needs to be @@ -42,24 +45,28 @@ next to the given cell. 
[source,js] -------------------------------------------------- +GET /_search { - "bool" : { - "must" : { - "match_all" : {} - }, - "filter" : { - "geohash_cell": { - "pin": { - "lat": 13.4080, - "lon": 52.5186 - }, - "precision": 3, - "neighbors": true + "query": { + "bool" : { + "must" : { + "match_all" : {} + }, + "filter" : { + "geohash_cell": { + "pin": { + "lat": 13.4080, + "lon": 52.5186 + }, + "precision": 3, + "neighbors": true + } } } } } -------------------------------------------------- +// CONSOLE [float] ==== Ignore Unmapped diff --git a/docs/reference/query-dsl/has-child-query.asciidoc b/docs/reference/query-dsl/has-child-query.asciidoc index bfadc33c06c..3b8352fd4e0 100644 --- a/docs/reference/query-dsl/has-child-query.asciidoc +++ b/docs/reference/query-dsl/has-child-query.asciidoc @@ -7,17 +7,21 @@ an example: [source,js] -------------------------------------------------- +GET /_search { - "has_child" : { - "type" : "blog_tag", - "query" : { - "term" : { - "tag" : "something" - } + "query": { + "has_child" : { + "type" : "blog_tag", + "query" : { + "term" : { + "tag" : "something" + } + } } } } -------------------------------------------------- +// CONSOLE [float] ==== Scoring capabilities @@ -32,18 +36,22 @@ inside the `has_child` query: [source,js] -------------------------------------------------- +GET /_search { - "has_child" : { - "type" : "blog_tag", - "score_mode" : "min", - "query" : { - "term" : { - "tag" : "something" - } + "query": { + "has_child" : { + "type" : "blog_tag", + "score_mode" : "min", + "query" : { + "term" : { + "tag" : "something" + } + } } } } -------------------------------------------------- +// CONSOLE [float] ==== Min/Max Children @@ -54,20 +62,24 @@ a match: [source,js] -------------------------------------------------- +GET /_search { - "has_child" : { - "type" : "blog_tag", - "score_mode" : "min", - "min_children": 2, <1> - "max_children": 10, <1> - "query" : { - "term" : { - "tag" : "something" + "query": { + "has_child" : { + "type" : "blog_tag", + "score_mode" : "min", + "min_children": 2, <1> + "max_children": 10, <1> + "query" : { + "term" : { + "tag" : "something" + } } } } } -------------------------------------------------- +// CONSOLE <1> Both `min_children` and `max_children` are optional. The `min_children` and `max_children` parameters can be combined with diff --git a/docs/reference/query-dsl/has-parent-query.asciidoc b/docs/reference/query-dsl/has-parent-query.asciidoc index 9896be6a0a5..202bcaac432 100644 --- a/docs/reference/query-dsl/has-parent-query.asciidoc +++ b/docs/reference/query-dsl/has-parent-query.asciidoc @@ -9,17 +9,21 @@ in the same manner as the `has_child` query. [source,js] -------------------------------------------------- +GET /_search { - "has_parent" : { - "parent_type" : "blog", - "query" : { - "term" : { - "tag" : "something" - } + "query": { + "has_parent" : { + "parent_type" : "blog", + "query" : { + "term" : { + "tag" : "something" + } + } } } } -------------------------------------------------- +// CONSOLE [float] ==== Scoring capabilities @@ -34,18 +38,22 @@ matching parent document. 
The score mode can be specified with the [source,js] -------------------------------------------------- +GET /_search { - "has_parent" : { - "parent_type" : "blog", - "score" : true, - "query" : { - "term" : { - "tag" : "something" - } + "query": { + "has_parent" : { + "parent_type" : "blog", + "score" : true, + "query" : { + "term" : { + "tag" : "something" + } + } } } } -------------------------------------------------- +// CONSOLE [float] ==== Ignore Unmapped diff --git a/docs/reference/query-dsl/ids-query.asciidoc b/docs/reference/query-dsl/ids-query.asciidoc index 7d08243a78f..09541ce51d3 100644 --- a/docs/reference/query-dsl/ids-query.asciidoc +++ b/docs/reference/query-dsl/ids-query.asciidoc @@ -6,13 +6,17 @@ uses the <> field. [source,js] -------------------------------------------------- +GET /_search { - "ids" : { - "type" : "my_type", - "values" : ["1", "4", "100"] + "query": { + "ids" : { + "type" : "my_type", + "values" : ["1", "4", "100"] + } } } -------------------------------------------------- +// CONSOLE The `type` is optional and can be omitted, and can also accept an array of values. If no type is specified, all types defined in the index mapping are tried. diff --git a/docs/reference/query-dsl/indices-query.asciidoc b/docs/reference/query-dsl/indices-query.asciidoc index e3b604b7a39..8f2f958086e 100644 --- a/docs/reference/query-dsl/indices-query.asciidoc +++ b/docs/reference/query-dsl/indices-query.asciidoc @@ -9,18 +9,18 @@ on the list, the alternative `no_match_query` is executed. [source,js] -------------------------------------------------- +GET /_search { - "indices" : { - "indices" : ["index1", "index2"], - "query" : { - "term" : { "tag" : "wow" } - }, - "no_match_query" : { - "term" : { "tag" : "kow" } + "query": { + "indices" : { + "indices" : ["index1", "index2"], + "query" : { "term" : { "tag" : "wow" } }, + "no_match_query" : { "term" : { "tag" : "kow" } } } } } -------------------------------------------------- +// CONSOLE You can use the `index` field to provide a single index. diff --git a/docs/reference/query-dsl/match-all-query.asciidoc b/docs/reference/query-dsl/match-all-query.asciidoc index d46b08f9e55..6e448828676 100644 --- a/docs/reference/query-dsl/match-all-query.asciidoc +++ b/docs/reference/query-dsl/match-all-query.asciidoc @@ -6,15 +6,27 @@ of `1.0`. [source,js] -------------------------------------------------- -{ "match_all": {} } +GET /_search +{ + "query": { + "match_all": {} + } +} -------------------------------------------------- +// CONSOLE The `_score` can be changed with the `boost` parameter: [source,js] -------------------------------------------------- -{ "match_all": { "boost" : 1.2 }} +GET /_search +{ + "query": { + "match_all": { "boost" : 1.2 } + } +} -------------------------------------------------- +// CONSOLE [[query-dsl-match-none-query]] [float] @@ -24,5 +36,11 @@ This is the inverse of the `match_all` query, which matches no documents. [source,js] -------------------------------------------------- -{ "match_none": {} } +GET /_search +{ + "query": { + "match_none": {} + } +} -------------------------------------------------- +// CONSOLE diff --git a/docs/reference/query-dsl/match-phrase-prefix-query.asciidoc b/docs/reference/query-dsl/match-phrase-prefix-query.asciidoc index b715bf8bd6d..e7e0f618652 100644 --- a/docs/reference/query-dsl/match-phrase-prefix-query.asciidoc +++ b/docs/reference/query-dsl/match-phrase-prefix-query.asciidoc @@ -6,12 +6,16 @@ allows for prefix matches on the last term in the text. 
For example: [source,js] -------------------------------------------------- +GET /_search { - "match_phrase_prefix" : { - "message" : "quick brown f" + "query": { + "match_phrase_prefix" : { + "message" : "quick brown f" + } } } -------------------------------------------------- +// CONSOLE It accepts the same parameters as the phrase type. In addition, it also accepts a `max_expansions` parameter (default `50`) that can control to how @@ -21,15 +25,19 @@ example: [source,js] -------------------------------------------------- +GET /_search { - "match_phrase_prefix" : { - "message" : { - "query" : "quick brown f", - "max_expansions" : 10 + "query": { + "match_phrase_prefix" : { + "message" : { + "query" : "quick brown f", + "max_expansions" : 10 + } } } } -------------------------------------------------- +// CONSOLE [IMPORTANT] =================================================== @@ -53,4 +61,4 @@ For better solutions for _search-as-you-type_ see the <> and {guide}/_index_time_search_as_you_type.html[Index-Time Search-as-You-Type]. -=================================================== \ No newline at end of file +=================================================== diff --git a/docs/reference/query-dsl/match-phrase-query.asciidoc b/docs/reference/query-dsl/match-phrase-query.asciidoc index 105866be12a..943d0e84d36 100644 --- a/docs/reference/query-dsl/match-phrase-query.asciidoc +++ b/docs/reference/query-dsl/match-phrase-query.asciidoc @@ -6,12 +6,16 @@ out of the analyzed text. For example: [source,js] -------------------------------------------------- +GET /_search { - "match_phrase" : { - "message" : "this is a test" + "query": { + "match_phrase" : { + "message" : "this is a test" + } } } -------------------------------------------------- +// CONSOLE A phrase query matches terms up to a configurable `slop` (which defaults to 0) in any order. Transposed terms have a slop of 2. @@ -22,12 +26,16 @@ definition, or the default search analyzer, for example: [source,js] -------------------------------------------------- +GET /_search { - "match_phrase" : { - "message" : { - "query" : "this is a test", - "analyzer" : "my_analyzer" + "query": { + "match_phrase" : { + "message" : { + "query" : "this is a test", + "analyzer" : "my_analyzer" + } } } } -------------------------------------------------- +// CONSOLE diff --git a/docs/reference/query-dsl/match-query.asciidoc b/docs/reference/query-dsl/match-query.asciidoc index 3dbb60d1ca2..c0081f1de9a 100644 --- a/docs/reference/query-dsl/match-query.asciidoc +++ b/docs/reference/query-dsl/match-query.asciidoc @@ -7,12 +7,16 @@ them, and constructs a query. For example: [source,js] -------------------------------------------------- +GET /_search { - "match" : { - "message" : "this is a test" + "query": { + "match" : { + "message" : "this is a test" + } } } -------------------------------------------------- +// CONSOLE Note, `message` is the name of a field, you can substitute the name of any field (including `_all`) instead. 
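For instance, a minimal sketch of the same match run against the `_all` field (assuming `_all` is enabled, which is the default):

[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "match" : {
            "_all" : "this is a test"
        }
    }
}
--------------------------------------------------
// CONSOLE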
@@ -57,15 +61,19 @@ change in structure, `message` is the field name): [source,js] -------------------------------------------------- +GET /_search { - "match" : { - "message" : { - "query" : "this is a test", - "operator" : "and" + "query": { + "match" : { + "message" : { + "query" : "this is a test", + "operator" : "and" + } } } } -------------------------------------------------- +// CONSOLE [[query-dsl-match-query-zero]] ===== Zero terms query @@ -76,16 +84,20 @@ change that the `zero_terms_query` option can be used, which accepts [source,js] -------------------------------------------------- +GET /_search { - "match" : { - "message" : { - "query" : "to be or not to be", - "operator" : "and", - "zero_terms_query": "all" + "query": { + "match" : { + "message" : { + "query" : "to be or not to be", + "operator" : "and", + "zero_terms_query": "all" + } } } } -------------------------------------------------- +// CONSOLE [[query-dsl-match-query-cutoff]] ===== Cutoff frequency @@ -113,16 +125,19 @@ Here is an example showing a query composed of stopwords exclusively: [source,js] -------------------------------------------------- +GET /_search { - "match" : { - "message" : { - "query" : "to be or not to be", - "cutoff_frequency" : 0.001 + "query": { + "match" : { + "message" : { + "query" : "to be or not to be", + "cutoff_frequency" : 0.001 + } } } } -------------------------------------------------- - +// CONSOLE IMPORTANT: The `cutoff_frequency` option operates on a per-shard-level. This means that when trying it out on test indexes with low document numbers you diff --git a/docs/reference/query-dsl/mlt-query.asciidoc b/docs/reference/query-dsl/mlt-query.asciidoc index dc525c2d311..b132b49f234 100644 --- a/docs/reference/query-dsl/mlt-query.asciidoc +++ b/docs/reference/query-dsl/mlt-query.asciidoc @@ -15,15 +15,19 @@ fields, limiting the number of selected terms to 12. [source,js] -------------------------------------------------- +GET /_search { - "more_like_this" : { - "fields" : ["title", "description"], - "like" : "Once upon a time", - "min_term_freq" : 1, - "max_query_terms" : 12 + "query": { + "more_like_this" : { + "fields" : ["title", "description"], + "like" : "Once upon a time", + "min_term_freq" : 1, + "max_query_terms" : 12 + } } } -------------------------------------------------- +// CONSOLE A more complicated use case consists of mixing texts with documents already existing in the index. In this case, the syntax to specify a document is @@ -31,27 +35,31 @@ similar to the one used in the <>. [source,js] -------------------------------------------------- +GET /_search { - "more_like_this" : { - "fields" : ["title", "description"], - "like" : [ - { - "_index" : "imdb", - "_type" : "movies", - "_id" : "1" - }, - { - "_index" : "imdb", - "_type" : "movies", - "_id" : "2" - }, - "and potentially some more text here as well" - ], - "min_term_freq" : 1, - "max_query_terms" : 12 + "query": { + "more_like_this" : { + "fields" : ["title", "description"], + "like" : [ + { + "_index" : "imdb", + "_type" : "movies", + "_id" : "1" + }, + { + "_index" : "imdb", + "_type" : "movies", + "_id" : "2" + }, + "and potentially some more text here as well" + ], + "min_term_freq" : 1, + "max_query_terms" : 12 + } } } -------------------------------------------------- +// CONSOLE Finally, users can mix some texts, a chosen set of documents but also provide documents not necessarily present in the index. 
To provide documents not @@ -59,32 +67,36 @@ present in the index, the syntax is similar to < - "fields": [ "subject", "message" ] <2> + "query": { + "multi_match" : { + "query": "this is a test", <1> + "fields": [ "subject", "message" ] <2> + } } } -------------------------------------------------- +// CONSOLE <1> The query string. <2> The fields to be queried. @@ -23,26 +27,35 @@ Fields can be specified with wildcards, eg: [source,js] -------------------------------------------------- +GET /_search { - "multi_match" : { - "query": "Will Smith", - "fields": [ "title", "*_name" ] <1> + "query": { + "multi_match" : { + "query": "Will Smith", + "fields": [ "title", "*_name" ] <1> + } } } -------------------------------------------------- +// CONSOLE <1> Query the `title`, `first_name` and `last_name` fields. Individual fields can be boosted with the caret (`^`) notation: [source,js] -------------------------------------------------- +GET /_search { - "multi_match" : { - "query" : "this is a test", - "fields" : [ "subject^3", "message" ] <1> + "query": { + "multi_match" : { + "query" : "this is a test", + "fields" : [ "subject^3", "message" ] <1> + } } } -------------------------------------------------- +// CONSOLE + <1> The `subject` field is three times as important as the `message` field. [[multi-match-types]] @@ -82,30 +95,38 @@ find the single best matching field. For instance, this query: [source,js] -------------------------------------------------- +GET /_search { - "multi_match" : { - "query": "brown fox", - "type": "best_fields", - "fields": [ "subject", "message" ], - "tie_breaker": 0.3 + "query": { + "multi_match" : { + "query": "brown fox", + "type": "best_fields", + "fields": [ "subject", "message" ], + "tie_breaker": 0.3 + } } } -------------------------------------------------- +// CONSOLE would be executed as: [source,js] -------------------------------------------------- +GET /_search { - "dis_max": { - "queries": [ - { "match": { "subject": "brown fox" }}, - { "match": { "message": "brown fox" }} - ], - "tie_breaker": 0.3 + "query": { + "dis_max": { + "queries": [ + { "match": { "subject": "brown fox" }}, + { "match": { "message": "brown fox" }} + ], + "tie_breaker": 0.3 + } } } -------------------------------------------------- +// CONSOLE Normally the `best_fields` type uses the score of the *single* best matching field, but if `tie_breaker` is specified, then it calculates the score as @@ -132,15 +153,20 @@ Take this query for example: [source,js] -------------------------------------------------- +GET /_search { - "multi_match" : { - "query": "Will Smith", - "type": "best_fields", - "fields": [ "first_name", "last_name" ], - "operator": "and" <1> + "query": { + "multi_match" : { + "query": "Will Smith", + "type": "best_fields", + "fields": [ "first_name", "last_name" ], + "operator": "and" <1> + } } } -------------------------------------------------- +// CONSOLE + <1> All terms must be present. 
This query is executed as: @@ -170,29 +196,37 @@ This query: [source,js] -------------------------------------------------- +GET /_search { - "multi_match" : { - "query": "quick brown fox", - "type": "most_fields", - "fields": [ "title", "title.original", "title.shingles" ] + "query": { + "multi_match" : { + "query": "quick brown fox", + "type": "most_fields", + "fields": [ "title", "title.original", "title.shingles" ] + } } } -------------------------------------------------- +// CONSOLE would be executed as: [source,js] -------------------------------------------------- +GET /_search { - "bool": { - "should": [ - { "match": { "title": "quick brown fox" }}, - { "match": { "title.original": "quick brown fox" }}, - { "match": { "title.shingles": "quick brown fox" }} - ] + "query": { + "bool": { + "should": [ + { "match": { "title": "quick brown fox" }}, + { "match": { "title.original": "quick brown fox" }}, + { "match": { "title.shingles": "quick brown fox" }} + ] + } } } -------------------------------------------------- +// CONSOLE The score from each `match` clause is added together, then divided by the number of `match` clauses. @@ -212,28 +246,36 @@ but they use a `match_phrase` or `match_phrase_prefix` query instead of a This query: [source,js] -------------------------------------------------- +GET /_search { - "multi_match" : { - "query": "quick brown f", - "type": "phrase_prefix", - "fields": [ "subject", "message" ] + "query": { + "multi_match" : { + "query": "quick brown f", + "type": "phrase_prefix", + "fields": [ "subject", "message" ] + } } } -------------------------------------------------- +// CONSOLE would be executed as: [source,js] -------------------------------------------------- +GET /_search { - "dis_max": { - "queries": [ - { "match_phrase_prefix": { "subject": "quick brown f" }}, - { "match_phrase_prefix": { "message": "quick brown f" }} - ] + "query": { + "dis_max": { + "queries": [ + { "match_phrase_prefix": { "subject": "quick brown f" }}, + { "match_phrase_prefix": { "message": "quick brown f" }} + ] + } } } -------------------------------------------------- +// CONSOLE Also, accepts `analyzer`, `boost`, `slop` and `zero_terms_query` as explained in <>. 
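As an illustrative sketch (the field names and `slop` value here are only examples), a `phrase` multi-match that allows the phrase terms to sit up to two positions apart could look like:

[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "multi_match" : {
            "query": "quick brown fox",
            "type": "phrase",
            "slop": 2,
            "fields": [ "subject", "message" ]
        }
    }
}
--------------------------------------------------
// CONSOLE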
Type `phrase_prefix` additionally accepts @@ -288,15 +330,19 @@ A query like: [source,js] -------------------------------------------------- +GET /_search { - "multi_match" : { - "query": "Will Smith", - "type": "cross_fields", - "fields": [ "first_name", "last_name" ], - "operator": "and" + "query": { + "multi_match" : { + "query": "Will Smith", + "type": "cross_fields", + "fields": [ "first_name", "last_name" ], + "operator": "and" + } } } -------------------------------------------------- +// CONSOLE is executed as: @@ -344,17 +390,21 @@ both use an `edge_ngram` analyzer, this query: [source,js] -------------------------------------------------- +GET /_search { - "multi_match" : { - "query": "Jon", - "type": "cross_fields", - "fields": [ + "query": { + "multi_match" : { + "query": "Jon", + "type": "cross_fields", + "fields": [ "first", "first.edge", "last", "last.edge" - ] + ] + } } } -------------------------------------------------- +// CONSOLE would be executed as: @@ -379,28 +429,33 @@ parameter to just one of them: [source,js] -------------------------------------------------- +GET /_search { + "query": { "bool": { - "should": [ - { - "multi_match" : { - "query": "Will Smith", - "type": "cross_fields", - "fields": [ "first", "last" ], - "minimum_should_match": "50%" <1> - } - }, - { - "multi_match" : { - "query": "Will Smith", - "type": "cross_fields", - "fields": [ "*.edge" ] - } - } - ] + "should": [ + { + "multi_match" : { + "query": "Will Smith", + "type": "cross_fields", + "fields": [ "first", "last" ], + "minimum_should_match": "50%" <1> + } + }, + { + "multi_match" : { + "query": "Will Smith", + "type": "cross_fields", + "fields": [ "*.edge" ] + } + } + ] } + } } -------------------------------------------------- +// CONSOLE + <1> Either `will` or `smith` must be present in either of the `first` or `last` fields @@ -409,15 +464,20 @@ parameter in the query. [source,js] -------------------------------------------------- +GET /_search { - "multi_match" : { - "query": "Jon", - "type": "cross_fields", - "analyzer": "standard", <1> - "fields": [ "first", "last", "*.edge" ] + "query": { + "multi_match" : { + "query": "Jon", + "type": "cross_fields", + "analyzer": "standard", <1> + "fields": [ "first", "last", "*.edge" ] + } } } -------------------------------------------------- +// CONSOLE + <1> Use the `standard` analyzer for all fields. 
which will be executed as: diff --git a/docs/reference/query-dsl/nested-query.asciidoc b/docs/reference/query-dsl/nested-query.asciidoc index 0b861509c0e..6e990e07f91 100644 --- a/docs/reference/query-dsl/nested-query.asciidoc +++ b/docs/reference/query-dsl/nested-query.asciidoc @@ -10,40 +10,47 @@ will work with: [source,js] -------------------------------------------------- +PUT /my_index { - "type1" : { - "properties" : { - "obj1" : { - "type" : "nested" + "mappings": { + "type1" : { + "properties" : { + "obj1" : { + "type" : "nested" + } } } } } + +GET _cluster/health?wait_for_status=yellow -------------------------------------------------- +// CONSOLE +// TESTSETUP And here is a sample nested query usage: [source,js] -------------------------------------------------- +GET /_search { - "nested" : { - "path" : "obj1", - "score_mode" : "avg", - "query" : { - "bool" : { - "must" : [ - { - "match" : {"obj1.name" : "blue"} - }, - { - "range" : {"obj1.count" : {"gt" : 5}} - } - ] + "query": { + "nested" : { + "path" : "obj1", + "score_mode" : "avg", + "query" : { + "bool" : { + "must" : [ + { "match" : {"obj1.name" : "blue"} }, + { "range" : {"obj1.count" : {"gt" : 5}} } + ] + } } } } } -------------------------------------------------- +// CONSOLE The query `path` points to the nested object path, and the `query` includes the query that will run on the nested docs matching the diff --git a/docs/reference/query-dsl/parent-id-query.asciidoc b/docs/reference/query-dsl/parent-id-query.asciidoc index a29073dafa9..8ea07a6d0b7 100644 --- a/docs/reference/query-dsl/parent-id-query.asciidoc +++ b/docs/reference/query-dsl/parent-id-query.asciidoc @@ -54,17 +54,21 @@ better as it does not need to do a join: [source,js] -------------------------------------------------- +GET /my_index/_search { - "has_parent": { - "type": "blog", - "query": { - "term": { - "_id": "1" + "query": { + "has_parent": { + "type": "blog_post", + "query": { + "term": { + "_id": "1" + } } } } } -------------------------------------------------- +// CONSOLE ==== Parameters diff --git a/docs/reference/query-dsl/percolate-query.asciidoc b/docs/reference/query-dsl/percolate-query.asciidoc index 267647c9dbb..896fa7ec3ee 100644 --- a/docs/reference/query-dsl/percolate-query.asciidoc +++ b/docs/reference/query-dsl/percolate-query.asciidoc @@ -13,26 +13,27 @@ Create an index with two mappings: [source,js] -------------------------------------------------- -curl -XPUT "http://localhost:9200/my-index" -d' +PUT /my-index { - "mappings": { - "doctype": { - "properties": { - "message": { - "type": "string" + "mappings": { + "doctype": { + "properties": { + "message": { + "type": "string" + } + } + }, + "queries": { + "properties": { + "query": { + "type": "percolator" + } + } } - } - }, - "queries": { - "properties": { - "query": { - "type": "percolator" - } - } } - } -}' +} -------------------------------------------------- +// CONSOLE The `doctype` mapping is the mapping used to preprocess the document defined in the `percolator` query before it @@ -50,20 +51,24 @@ Register a query in the percolator: [source,js] -------------------------------------------------- -curl -XPUT 'localhost:9200/my-index/queries/1' -d '{ +PUT /my-index/queries/1 +{ "query" : { "match" : { "message" : "bonsai tree" } } -}' +} -------------------------------------------------- +// CONSOLE +// TEST[continued] Match a document to the registered percolator queries: [source,js] -------------------------------------------------- -curl -XGET 
'localhost:9200/my-index/_search' -d '{ +GET /my-index/_search +{ "query" : { "percolate" : { "field" : "query", @@ -73,8 +78,10 @@ curl -XGET 'localhost:9200/my-index/_search' -d '{ } } } -}' +} -------------------------------------------------- +// CONSOLE +// TEST[continued] The above request will yield the following response: @@ -151,12 +158,13 @@ Index the document we want to percolate: [source,js] -------------------------------------------------- -curl -XPUT "http://localhost:9200/my-index/message/1" -d' +PUT /my-index/message/1 { "message" : "A new bonsai tree in the office" -}' +} -------------------------------------------------- - +// CONSOLE +// TEST[continued] Index response: [source,js] @@ -179,7 +187,7 @@ Percolating an existing document, using the index response as basis to build to [source,js] -------------------------------------------------- -curl -XGET "http://localhost:9200/my-index/_search" -d' +GET /my-index/_search { "query" : { "percolate" : { @@ -191,8 +199,10 @@ curl -XGET "http://localhost:9200/my-index/_search" -d' "version" : 1 <1> } } -}' +} -------------------------------------------------- +// CONSOLE +// TEST[continued] <1> The version is optional, but useful in certain cases. We can then ensure that we are trying to percolate the document we have just indexed. A change may be made after we have indexed, and if that is the @@ -216,35 +226,39 @@ Save a query: [source,js] -------------------------------------------------- -curl -XPUT "http://localhost:9200/my-index/queries/1" -d' +PUT /my-index/queries/1 +{ "query" : { "match" : { "message" : "brown fox" } } -}' +} -------------------------------------------------- +// CONSOLE +// TEST[continued] Save another query: [source,js] -------------------------------------------------- -curl -XPUT "http://localhost:9200/my-index/queries/2" -d' +PUT /my-index/queries/2 +{ "query" : { "match" : { "message" : "lazy dog" } } -}' +} -------------------------------------------------- +// CONSOLE +// TEST[continued] Execute a search request with the `percolate` query and highlighting enabled: [source,js] -------------------------------------------------- -curl -XGET "http://localhost:9200/my-index/_search" -d' +GET /my-index/_search { "query" : { "percolate" : { @@ -260,8 +274,10 @@ curl -XGET "http://localhost:9200/my-index/_search" -d' "message": {} } } -}' +} -------------------------------------------------- +// CONSOLE +// TEST[continued] This will yield the following response. @@ -327,13 +343,36 @@ the document defined in the `percolate` query. ==== How it Works Under the Hood When indexing a document into an index that has the <> mapping configured, the query -part of the documents gets parsed into a Lucene query and is kept in memory until that percolator document is removed. -So, all the active percolator queries are kept in memory. +part of the document gets parsed into a Lucene query and is stored into the Lucene index. A binary representation +of the query gets stored, but also the query's terms are analyzed and stored into an indexed field. At search time, the document specified in the request gets parsed into a Lucene document and is stored in an in-memory -temporary Lucene index.
This in-memory index can just hold this one document and it is optimized for that. After this +a special query is built based on the terms in the in-memory index to select candidate percolator queries based on +their indexed query terms. These queries are then evaluated against the in-memory index to check whether they actually match. -By using `routing` or additional queries the amount of percolator queries that need to be executed can be reduced and thus -the time the search API needs to run can be decreased. \ No newline at end of file +The selection of candidate percolator query matches is an important performance optimization during the execution +of the `percolate` query, as it can significantly reduce the number of candidate matches the in-memory index needs to +evaluate. The reason the `percolate` query can do this is that during indexing of the percolator queries the query +terms are extracted and indexed with the percolator query. Unfortunately the percolator cannot extract terms from +all queries (for example the `wildcard` or `geo_shape` query), and as a result, in certain cases the percolator +can't apply this selection optimization (for example, if an unsupported query is defined in a required clause of a boolean query, +or if the unsupported query is the only query in the percolator document). These queries are marked by the percolator and +can be found by running the following search: + + +[source,js] +--------------------------------------------------- +GET /_search +{ + "query": { + "term" : { + "query.unknown_query" : "" + } + } +} +--------------------------------------------------- +// CONSOLE + +NOTE: The above example assumes that there is a `query` field of type +`percolator` in the mappings. diff --git a/docs/reference/query-dsl/prefix-query.asciidoc b/docs/reference/query-dsl/prefix-query.asciidoc index cf26e850ad8..d2b75d10e5f 100644 --- a/docs/reference/query-dsl/prefix-query.asciidoc +++ b/docs/reference/query-dsl/prefix-query.asciidoc @@ -8,28 +8,37 @@ that starts with `ki`: [source,js] -------------------------------------------------- -{ +GET /_search +{ "query": { "prefix" : { "user" : "ki" } + } } -------------------------------------------------- +// CONSOLE A boost can also be associated with the query: [source,js] -------------------------------------------------- -{ +GET /_search +{ "query": { "prefix" : { "user" : { "value" : "ki", "boost" : 2.0 } } + } } -------------------------------------------------- +// CONSOLE Or : [source,js] -------------------------------------------------- -{ +GET /_search +{ "query": { "prefix" : { "user" : { "prefix" : "ki", "boost" : 2.0 } } + } } -------------------------------------------------- +// CONSOLE This multi term query allows you to control how it gets rewritten using the <> diff --git a/docs/reference/query-dsl/query-string-query.asciidoc b/docs/reference/query-dsl/query-string-query.asciidoc index 9ad68ad4f73..60477d6e28a 100644 --- a/docs/reference/query-dsl/query-string-query.asciidoc +++ b/docs/reference/query-dsl/query-string-query.asciidoc @@ -6,13 +6,17 @@ an example: [source,js] -------------------------------------------------- +GET /_search { - "query_string" : { - "default_field" : "content", - "query" : "this AND that OR thus" + "query": { + "query_string" : { + "default_field" : "content", + "query" : "this AND that OR thus" + } } } -------------------------------------------------- +// CONSOLE The `query_string` top level parameters include: @@ -113,25 +117,33 @@ For example, the following query
[source,js] -------------------------------------------------- +GET /_search { - "query_string" : { - "fields" : ["content", "name"], - "query" : "this AND that" + "query": { + "query_string" : { + "fields" : ["content", "name"], + "query" : "this AND that" + } } } -------------------------------------------------- +// CONSOLE matches the same words as [source,js] -------------------------------------------------- +GET /_search { - "query_string": { - "query": "(content:this OR name:this) AND (content:that OR name:that)" + "query": { + "query_string": { + "query": "(content:this OR name:this) AND (content:that OR name:that)" + } } } -------------------------------------------------- +// CONSOLE Since several queries are generated from the individual search terms, combining them can be automatically done using either a `dis_max` query or a @@ -140,14 +152,18 @@ notation): [source,js] -------------------------------------------------- +GET /_search { - "query_string" : { - "fields" : ["content", "name^5"], - "query" : "this AND that OR thus", - "use_dis_max" : true + "query": { + "query_string" : { + "fields" : ["content", "name^5"], + "query" : "this AND that OR thus", + "use_dis_max" : true + } } } -------------------------------------------------- +// CONSOLE Simple wildcard can also be used to search "within" specific inner elements of the document. For example, if we have a `city` object with @@ -156,14 +172,18 @@ search on all "city" fields: [source,js] -------------------------------------------------- +GET /_search { - "query_string" : { - "fields" : ["city.*"], - "query" : "this AND that OR thus", - "use_dis_max" : true + "query": { + "query_string" : { + "fields" : ["city.*"], + "query" : "this AND that OR thus", + "use_dis_max" : true + } } } -------------------------------------------------- +// CONSOLE Another option is to provide the wildcard fields search in the query string itself (properly escaping the `*` sign), for example: @@ -188,13 +208,17 @@ introduced fields included). For example: [source,js] -------------------------------------------------- +GET /_search { - "query_string" : { - "fields" : ["content", "name.*^5"], - "query" : "this AND that OR thus", - "use_dis_max" : true + "query": { + "query_string" : { + "fields" : ["content", "name.*^5"], + "query" : "this AND that OR thus", + "use_dis_max" : true + } } } -------------------------------------------------- +// CONSOLE include::query-string-syntax.asciidoc[] diff --git a/docs/reference/query-dsl/query_filter_context.asciidoc b/docs/reference/query-dsl/query_filter_context.asciidoc index 6fadc36aa73..79f8c4bd960 100644 --- a/docs/reference/query-dsl/query_filter_context.asciidoc +++ b/docs/reference/query-dsl/query_filter_context.asciidoc @@ -47,7 +47,7 @@ conditions are met: [source,js] ------------------------------------ -GET _search +GET /_search { "query": { <1> "bool": { <2> @@ -63,6 +63,7 @@ GET _search } } ------------------------------------ +// CONSOLE <1> The `query` parameter indicates query context. <2> The `bool` and two `match` clauses are used in query context, which means that they are used to score how well each document diff --git a/docs/reference/query-dsl/range-query.asciidoc b/docs/reference/query-dsl/range-query.asciidoc index eaf1dca8181..a0005ff3ff2 100644 --- a/docs/reference/query-dsl/range-query.asciidoc +++ b/docs/reference/query-dsl/range-query.asciidoc @@ -9,16 +9,20 @@ a `NumericRangeQuery`. 
The following example returns all documents where [source,js] -------------------------------------------------- +GET _search { - "range" : { - "age" : { - "gte" : 10, - "lte" : 20, - "boost" : 2.0 + "query": { + "range" : { + "age" : { + "gte" : 10, + "lte" : 20, + "boost" : 2.0 + } } } } -------------------------------------------------- +// CONSOLE The `range` query accepts the following parameters: @@ -38,15 +42,19 @@ specified using <>: [source,js] -------------------------------------------------- +GET _search { - "range" : { - "date" : { - "gte" : "now-1d/d", - "lt" : "now/d" + "query": { + "range" : { + "date" : { + "gte" : "now-1d/d", + "lt" : "now/d" + } } } } -------------------------------------------------- +// CONSOLE ===== Date math and rounding @@ -86,16 +94,20 @@ passing the `format` parameter to the `range` query: [source,js] -------------------------------------------------- +GET _search { - "range" : { - "born" : { - "gte": "01/01/2012", - "lte": "2013", - "format": "dd/MM/yyyy||yyyy" + "query": { + "range" : { + "born" : { + "gte": "01/01/2012", + "lte": "2013", + "format": "dd/MM/yyyy||yyyy" + } } } } -------------------------------------------------- +// CONSOLE ===== Time zone in range queries @@ -105,15 +117,19 @@ accepts it), or it can be specified as the `time_zone` parameter: [source,js] -------------------------------------------------- +GET _search { - "range" : { - "timestamp" : { - "gte": "2015-01-01 00:00:00", <1> - "lte": "now", <2> - "time_zone": "+01:00" + "query": { + "range" : { + "timestamp" : { + "gte": "2015-01-01 00:00:00", <1> + "lte": "now", <2> + "time_zone": "+01:00" + } } } } -------------------------------------------------- +// CONSOLE <1> This date will be converted to `2014-12-31T23:00:00 UTC`. <2> `now` is not affected by the `time_zone` parameter (dates must be stored as UTC). diff --git a/docs/reference/query-dsl/regexp-query.asciidoc b/docs/reference/query-dsl/regexp-query.asciidoc index 692caf43480..2d57f1d7dd0 100644 --- a/docs/reference/query-dsl/regexp-query.asciidoc +++ b/docs/reference/query-dsl/regexp-query.asciidoc @@ -15,40 +15,52 @@ matchers like `.*?+` will mostly lower performance. [source,js] -------------------------------------------------- +GET /_search { - "regexp":{ - "name.first": "s.*y" + "query": { + "regexp":{ + "name.first": "s.*y" + } } } -------------------------------------------------- +// CONSOLE Boosting is also supported [source,js] -------------------------------------------------- +GET /_search { - "regexp":{ - "name.first":{ - "value":"s.*y", - "boost":1.2 + "query": { + "regexp":{ + "name.first":{ + "value":"s.*y", + "boost":1.2 + } } } } -------------------------------------------------- +// CONSOLE You can also use special flags [source,js] -------------------------------------------------- +GET /_search { - "regexp":{ - "name.first": { - "value": "s.*y", - "flags" : "INTERSECTION|COMPLEMENT|EMPTY" + "query": { + "regexp":{ + "name.first": { + "value": "s.*y", + "flags" : "INTERSECTION|COMPLEMENT|EMPTY" + } } } } -------------------------------------------------- +// CONSOLE Possible flags are `ALL` (default), `ANYSTRING`, `COMPLEMENT`, `EMPTY`, `INTERSECTION`, `INTERVAL`, or `NONE`. Please check the @@ -64,16 +76,19 @@ this limit to allow more complex regular expressions to execute. 
[source,js] -------------------------------------------------- +GET /_search { - "regexp":{ - "name.first": { - "value": "s.*y", - "flags" : "INTERSECTION|COMPLEMENT|EMPTY", - "max_determinized_states": 20000 + "query": { + "regexp":{ + "name.first": { + "value": "s.*y", + "flags" : "INTERSECTION|COMPLEMENT|EMPTY", + "max_determinized_states": 20000 + } } } } -------------------------------------------------- - +// CONSOLE include::regexp-syntax.asciidoc[] diff --git a/docs/reference/query-dsl/script-query.asciidoc b/docs/reference/query-dsl/script-query.asciidoc index 223460f723d..ee06a1b64bd 100644 --- a/docs/reference/query-dsl/script-query.asciidoc +++ b/docs/reference/query-dsl/script-query.asciidoc @@ -7,17 +7,20 @@ context, for example: [source,js] ---------------------------------------------- -"bool" : { - "must" : { - ... - }, - "filter" : { - "script" : { - "script" : "doc['num1'].value > 1" +GET /_search +{ + "query": { + "bool" : { + "must" : { + "script" : { + "script" : "doc['num1'].value > 1" + } + } } } } ---------------------------------------------- +// CONSOLE [float] ==== Custom Parameters @@ -28,20 +31,23 @@ to use the ability to pass parameters to the script itself, for example: [source,js] ---------------------------------------------- -"bool" : { - "must" : { - ... - }, - "filter" : { - "script" : { - "script" : { - "inline" : "doc['num1'].value > param1" - "params" : { - "param1" : 5 +GET /_search +{ + "query": { + "bool" : { + "must" : { + "script" : { + "script" : { + "inline" : "doc['num1'].value > param1", + "params" : { + "param1" : 5 + } + } } } } } } ---------------------------------------------- +// CONSOLE diff --git a/docs/reference/query-dsl/simple-query-string-query.asciidoc b/docs/reference/query-dsl/simple-query-string-query.asciidoc index 572c0661fa4..796f2517fea 100644 --- a/docs/reference/query-dsl/simple-query-string-query.asciidoc +++ b/docs/reference/query-dsl/simple-query-string-query.asciidoc @@ -8,15 +8,19 @@ an example: [source,js] -------------------------------------------------- +GET /_search { + "query": { "simple_query_string" : { "query": "\"fried eggs\" +(eggplant | potato) -frittata", "analyzer": "snowball", "fields": ["body^5","_all"], "default_operator": "and" } + } } -------------------------------------------------- +// CONSOLE The `simple_query_string` top level parameters include: @@ -94,13 +98,17 @@ introduced fields included). For example: [source,js] -------------------------------------------------- +GET /_search { - "simple_query_string" : { - "fields" : ["content", "name.*^5"], - "query" : "foo bar baz" + "query": { + "simple_query_string" : { + "fields" : ["content", "name.*^5"], + "query" : "foo bar baz" + } } } -------------------------------------------------- +// CONSOLE [float] ==== Flags @@ -110,13 +118,17 @@ should be enabled. It is specified as a `|`-delimited string with the [source,js] -------------------------------------------------- +GET /_search { - "simple_query_string" : { - "query" : "foo | bar + baz*", - "flags" : "OR|AND|PREFIX" + "query": { + "simple_query_string" : { + "query" : "foo | bar + baz*", + "flags" : "OR|AND|PREFIX" + } } } -------------------------------------------------- +// CONSOLE The available flags are: `ALL`, `NONE`, `AND`, `OR`, `NOT`, `PREFIX`, `PHRASE`, `PRECEDENCE`, `ESCAPE`, `WHITESPACE`, `FUZZY`, `NEAR`, and `SLOP`. 
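As a further sketch, passing `NONE` disables the special syntax entirely, so the string below is analyzed as plain text and the operators lose their meaning (the query string and field are only examples):

[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "simple_query_string" : {
            "query" : "\"fried eggs\" +(eggplant | potato) -frittata",
            "fields" : ["body"],
            "flags" : "NONE"
        }
    }
}
--------------------------------------------------
// CONSOLE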
diff --git a/docs/reference/query-dsl/span-containing-query.asciidoc b/docs/reference/query-dsl/span-containing-query.asciidoc index 965bf855b6f..638c6999233 100644 --- a/docs/reference/query-dsl/span-containing-query.asciidoc +++ b/docs/reference/query-dsl/span-containing-query.asciidoc @@ -6,24 +6,28 @@ query maps to Lucene `SpanContainingQuery`. Here is an example: [source,js] -------------------------------------------------- +GET /_search { - "span_containing" : { - "little" : { - "span_term" : { "field1" : "foo" } - }, - "big" : { - "span_near" : { - "clauses" : [ - { "span_term" : { "field1" : "bar" } }, - { "span_term" : { "field1" : "baz" } } - ], - "slop" : 5, - "in_order" : true + "query": { + "span_containing" : { + "little" : { + "span_term" : { "field1" : "foo" } + }, + "big" : { + "span_near" : { + "clauses" : [ + { "span_term" : { "field1" : "bar" } }, + { "span_term" : { "field1" : "baz" } } + ], + "slop" : 5, + "in_order" : true + } } } } } -------------------------------------------------- +// CONSOLE The `big` and `little` clauses can be any span type query. Matching spans from `big` that contain matches from `little` are returned. diff --git a/docs/reference/query-dsl/span-first-query.asciidoc b/docs/reference/query-dsl/span-first-query.asciidoc index 74fe7ff88ba..dba7932661d 100644 --- a/docs/reference/query-dsl/span-first-query.asciidoc +++ b/docs/reference/query-dsl/span-first-query.asciidoc @@ -6,15 +6,19 @@ to Lucene `SpanFirstQuery`. Here is an example: [source,js] -------------------------------------------------- +GET /_search { - "span_first" : { - "match" : { - "span_term" : { "user" : "kimchy" } - }, - "end" : 3 + "query": { + "span_first" : { + "match" : { + "span_term" : { "user" : "kimchy" } + }, + "end" : 3 + } } } -------------------------------------------------- +// CONSOLE The `match` clause can be any other span type query. The `end` controls the maximum end position permitted in a match. diff --git a/docs/reference/query-dsl/span-multi-term-query.asciidoc b/docs/reference/query-dsl/span-multi-term-query.asciidoc index af3da5cf7dd..3d1672753d2 100644 --- a/docs/reference/query-dsl/span-multi-term-query.asciidoc +++ b/docs/reference/query-dsl/span-multi-term-query.asciidoc @@ -7,24 +7,32 @@ it can be nested. Example: [source,js] -------------------------------------------------- +GET /_search { - "span_multi":{ - "match":{ - "prefix" : { "user" : { "value" : "ki" } } + "query": { + "span_multi":{ + "match":{ + "prefix" : { "user" : { "value" : "ki" } } + } } } } -------------------------------------------------- +// CONSOLE A boost can also be associated with the query: [source,js] -------------------------------------------------- +GET /_search { - "span_multi":{ - "match":{ - "prefix" : { "user" : { "value" : "ki", "boost" : 1.08 } } + "query": { + "span_multi":{ + "match":{ + "prefix" : { "user" : { "value" : "ki", "boost" : 1.08 } } + } } } } -------------------------------------------------- +// CONSOLE diff --git a/docs/reference/query-dsl/span-near-query.asciidoc b/docs/reference/query-dsl/span-near-query.asciidoc index 258bc3b51ad..e69be783e3d 100644 --- a/docs/reference/query-dsl/span-near-query.asciidoc +++ b/docs/reference/query-dsl/span-near-query.asciidoc @@ -8,18 +8,22 @@ matches are required to be in-order. 
The span near query maps to Lucene [source,js] -------------------------------------------------- +GET /_search { - "span_near" : { - "clauses" : [ - { "span_term" : { "field" : "value1" } }, - { "span_term" : { "field" : "value2" } }, - { "span_term" : { "field" : "value3" } } - ], - "slop" : 12, - "in_order" : false + "query": { + "span_near" : { + "clauses" : [ + { "span_term" : { "field" : "value1" } }, + { "span_term" : { "field" : "value2" } }, + { "span_term" : { "field" : "value3" } } + ], + "slop" : 12, + "in_order" : false + } } } -------------------------------------------------- +// CONSOLE The `clauses` element is a list of one or more other span type queries and the `slop` controls the maximum number of intervening unmatched diff --git a/docs/reference/query-dsl/span-not-query.asciidoc b/docs/reference/query-dsl/span-not-query.asciidoc index 73186985b0d..5a648bd4b0e 100644 --- a/docs/reference/query-dsl/span-not-query.asciidoc +++ b/docs/reference/query-dsl/span-not-query.asciidoc @@ -6,24 +6,28 @@ query maps to Lucene `SpanNotQuery`. Here is an example: [source,js] -------------------------------------------------- +GET /_search { - "span_not" : { - "include" : { - "span_term" : { "field1" : "hoya" } - }, - "exclude" : { - "span_near" : { - "clauses" : [ - { "span_term" : { "field1" : "la" } }, - { "span_term" : { "field1" : "hoya" } } - ], - "slop" : 0, - "in_order" : true + "query": { + "span_not" : { + "include" : { + "span_term" : { "field1" : "hoya" } + }, + "exclude" : { + "span_near" : { + "clauses" : [ + { "span_term" : { "field1" : "la" } }, + { "span_term" : { "field1" : "hoya" } } + ], + "slop" : 0, + "in_order" : true + } } } } } -------------------------------------------------- +// CONSOLE The `include` and `exclude` clauses can be any span type query. The `include` clause is the span query whose matches are filtered, and the diff --git a/docs/reference/query-dsl/span-or-query.asciidoc b/docs/reference/query-dsl/span-or-query.asciidoc index 72a4ce8724b..470935d6f5c 100644 --- a/docs/reference/query-dsl/span-or-query.asciidoc +++ b/docs/reference/query-dsl/span-or-query.asciidoc @@ -6,15 +6,19 @@ Matches the union of its span clauses. The span or query maps to Lucene [source,js] -------------------------------------------------- +GET /_search { - "span_or" : { - "clauses" : [ - { "span_term" : { "field" : "value1" } }, - { "span_term" : { "field" : "value2" } }, - { "span_term" : { "field" : "value3" } } - ] + "query": { + "span_or" : { + "clauses" : [ + { "span_term" : { "field" : "value1" } }, + { "span_term" : { "field" : "value2" } }, + { "span_term" : { "field" : "value3" } } + ] + } } } -------------------------------------------------- +// CONSOLE The `clauses` element is a list of one or more other span type queries. diff --git a/docs/reference/query-dsl/span-term-query.asciidoc b/docs/reference/query-dsl/span-term-query.asciidoc index 9de86d48684..1b12a3c35f7 100644 --- a/docs/reference/query-dsl/span-term-query.asciidoc +++ b/docs/reference/query-dsl/span-term-query.asciidoc @@ -6,25 +6,37 @@ Matches spans containing a term. 
The span term query maps to Lucene [source,js] -------------------------------------------------- +GET /_search { - "span_term" : { "user" : "kimchy" } + "query": { + "span_term" : { "user" : "kimchy" } + } } -------------------------------------------------- +// CONSOLE A boost can also be associated with the query: [source,js] -------------------------------------------------- +GET /_search { - "span_term" : { "user" : { "value" : "kimchy", "boost" : 2.0 } } + "query": { + "span_term" : { "user" : { "value" : "kimchy", "boost" : 2.0 } } + } } -------------------------------------------------- +// CONSOLE Or : [source,js] -------------------------------------------------- +GET /_search { - "span_term" : { "user" : { "term" : "kimchy", "boost" : 2.0 } } + "query": { + "span_term" : { "user" : { "term" : "kimchy", "boost" : 2.0 } } + } } -------------------------------------------------- +// CONSOLE diff --git a/docs/reference/query-dsl/span-within-query.asciidoc b/docs/reference/query-dsl/span-within-query.asciidoc index dc5c4bbfdfd..b70835c4134 100644 --- a/docs/reference/query-dsl/span-within-query.asciidoc +++ b/docs/reference/query-dsl/span-within-query.asciidoc @@ -6,24 +6,28 @@ query maps to Lucene `SpanWithinQuery`. Here is an example: [source,js] -------------------------------------------------- +GET /_search { - "span_within" : { - "little" : { - "span_term" : { "field1" : "foo" } - }, - "big" : { - "span_near" : { - "clauses" : [ - { "span_term" : { "field1" : "bar" } }, - { "span_term" : { "field1" : "baz" } } - ], - "slop" : 5, - "in_order" : true + "query": { + "span_within" : { + "little" : { + "span_term" : { "field1" : "foo" } + }, + "big" : { + "span_near" : { + "clauses" : [ + { "span_term" : { "field1" : "bar" } }, + { "span_term" : { "field1" : "baz" } } + ], + "slop" : 5, + "in_order" : true + } } } } } -------------------------------------------------- +// CONSOLE The `big` and `little` clauses can be any span type query. Matching spans from `little` that are enclosed within `big` are returned. diff --git a/docs/reference/query-dsl/template-query.asciidoc b/docs/reference/query-dsl/template-query.asciidoc index 35365e6f9b2..f66dbe56d3e 100644 --- a/docs/reference/query-dsl/template-query.asciidoc +++ b/docs/reference/query-dsl/template-query.asciidoc @@ -19,8 +19,8 @@ GET /_search } } } - ------------------------------------------ +// CONSOLE The above request is translated into: @@ -34,8 +34,8 @@ GET /_search } } } - ------------------------------------------ +// CONSOLE Alternatively passing the template as an escaped string works as well: @@ -53,6 +53,8 @@ GET /_search } } ------------------------------------------ +// CONSOLE + <1> New line characters (`\n`) should be escaped as `\\n` or removed, and quotes (`"`) should be escaped as `\\"`. @@ -77,6 +79,8 @@ GET /_search } } ------------------------------------------ +// CONSOLE + <1> Name of the query template in `config/scripts/`, i.e., `my_template.mustache`. 
Alternatively, you can register a query template in the cluster state with: @@ -85,9 +89,10 @@ Alternatively, you can register a query template in the cluster state with: ------------------------------------------ PUT /_search/template/my_template { - "template": { "match": { "text": "{{query_string}}" }}, + "template": { "match": { "text": "{{query_string}}" }} } ------------------------------------------ +// CONSOLE and refer to it in the `template` query with the `id` parameter: @@ -106,9 +111,13 @@ GET /_search } } ------------------------------------------ +// CONSOLE +// TEST[continued] + <1> Name of the query template in `config/scripts/`, i.e., `my_template.mustache`. There is also a dedicated `template` endpoint that allows you to template an entire search request. Please see <> for more details. + diff --git a/docs/reference/query-dsl/terms-query.asciidoc b/docs/reference/query-dsl/terms-query.asciidoc index 08df1e6fece..e00c18bb56f 100644 --- a/docs/reference/query-dsl/terms-query.asciidoc +++ b/docs/reference/query-dsl/terms-query.asciidoc @@ -6,14 +6,18 @@ Filters documents that have fields that match any of the provided terms [source,js] -------------------------------------------------- +GET /_search { - "constant_score" : { - "filter" : { - "terms" : { "user" : ["kimchy", "elasticsearch"]} + "query": { + "constant_score" : { + "filter" : { + "terms" : { "user" : ["kimchy", "elasticsearch"]} + } } } } -------------------------------------------------- +// CONSOLE The `terms` query is also aliased with `in` as the filter name for simpler usage deprecated[5.0.0,use `terms` instead]. @@ -63,33 +67,37 @@ possible, reducing the need for networking. [float] ===== Terms lookup twitter example +First we index the information for the user with id 2, specifically its +followers, then index a tweet from the user with id 1. Finally we search on +all the tweets that match the followers of user 2. [source,js] -------------------------------------------------- -# index the information for user with id 2, specifically, its followers -curl -XPUT localhost:9200/users/user/2 -d '{ - "followers" : ["1", "3"] -}' +PUT /users/user/2 +{ + "followers" : ["1", "3"] +} -# index a tweet, from user with id 1 -curl -XPUT localhost:9200/tweets/tweet/1 -d '{ - "user" : "1" -}' +PUT /tweets/tweet/1 +{ + "user" : "1" +} -# search on all the tweets that match the followers of user 2 -curl -XGET localhost:9200/tweets/_search -d '{ - "query" : { - "terms" : { - "user" : { - "index" : "users", - "type" : "user", - "id" : "2", - "path" : "followers" - } +GET /tweets/_search +{ + "query" : { + "terms" : { + "user" : { + "index" : "users", + "type" : "user", + "id" : "2", + "path" : "followers" + } + } } - } -}' +} -------------------------------------------------- +// CONSOLE The structure of the external terms document can also include an array of inner objects, for example: diff --git a/docs/reference/query-dsl/type-query.asciidoc b/docs/reference/query-dsl/type-query.asciidoc index d3ef1a30fb1..05e909bc366 100644 --- a/docs/reference/query-dsl/type-query.asciidoc +++ b/docs/reference/query-dsl/type-query.asciidoc @@ -5,9 +5,13 @@ Filters documents matching the provided document / mapping type.
[source,js] -------------------------------------------------- +GET /_search { - "type" : { - "value" : "my_type" + "query": { + "type" : { + "value" : "my_type" + } } } -------------------------------------------------- +// CONSOLE diff --git a/docs/reference/query-dsl/wildcard-query.asciidoc b/docs/reference/query-dsl/wildcard-query.asciidoc index d72dbec2481..ad82c029d8c 100644 --- a/docs/reference/query-dsl/wildcard-query.asciidoc +++ b/docs/reference/query-dsl/wildcard-query.asciidoc @@ -11,28 +11,40 @@ query maps to Lucene `WildcardQuery`. [source,js] -------------------------------------------------- +GET /_search { - "wildcard" : { "user" : "ki*y" } + "query": { + "wildcard" : { "user" : "ki*y" } + } } -------------------------------------------------- +// CONSOLE A boost can also be associated with the query: [source,js] -------------------------------------------------- +GET /_search { - "wildcard" : { "user" : { "value" : "ki*y", "boost" : 2.0 } } + "query": { + "wildcard" : { "user" : { "value" : "ki*y", "boost" : 2.0 } } + } } -------------------------------------------------- +// CONSOLE Or : [source,js] -------------------------------------------------- +GET /_search { - "wildcard" : { "user" : { "wildcard" : "ki*y", "boost" : 2.0 } } + "query": { + "wildcard" : { "user" : { "wildcard" : "ki*y", "boost" : 2.0 } } + } } -------------------------------------------------- +// CONSOLE This multi term query allows to control how it gets rewritten using the <> diff --git a/docs/reference/search/request/highlighting.asciidoc b/docs/reference/search/request/highlighting.asciidoc index 53d6e23fa80..c235c37b338 100644 --- a/docs/reference/search/request/highlighting.asciidoc +++ b/docs/reference/search/request/highlighting.asciidoc @@ -8,8 +8,11 @@ The following is an example of the search request body: [source,js] -------------------------------------------------- +GET /_search { - "query" : {...}, + "query" : { + "match": { "user": "kimchy" } + }, "highlight" : { "fields" : { "content" : {} @@ -17,6 +20,7 @@ The following is an example of the search request body: } } -------------------------------------------------- +// CONSOLE In the above case, the `content` field will be highlighted for each search hit (there will be another element in each search hit, called @@ -131,8 +135,11 @@ The following is an example that forces the use of the plain highlighter: [source,js] -------------------------------------------------- +GET /_search { - "query" : {...}, + "query" : { + "match": { "user": "kimchy" } + }, "highlight" : { "fields" : { "content" : {"type" : "plain"} @@ -140,6 +147,7 @@ The following is an example that forces the use of the plain highlighter: } } -------------------------------------------------- +// CONSOLE ==== Force highlighting on source @@ -148,8 +156,11 @@ stored separately. Defaults to `false`. [source,js] -------------------------------------------------- +GET /_search { - "query" : {...}, + "query" : { + "match": { "user": "kimchy" } + }, "highlight" : { "fields" : { "content" : {"force_source" : true} @@ -157,6 +168,7 @@ stored separately. Defaults to `false`. 
} } -------------------------------------------------- +// CONSOLE [[tags]] ==== Highlighting Tags @@ -167,8 +179,11 @@ for example: [source,js] -------------------------------------------------- +GET /_search { - "query" : {...}, + "query" : { + "match": { "user": "kimchy" } + }, "highlight" : { "pre_tags" : [""], "post_tags" : [""], @@ -178,14 +193,18 @@ for example: } } -------------------------------------------------- +// CONSOLE When using the fast vector highlighter, there can be more tags, and the "importance" is ordered. [source,js] -------------------------------------------------- +GET /_search { - "query" : {...}, + "query" : { + "match": { "user": "kimchy" } + }, "highlight" : { "pre_tags" : ["", ""], "post_tags" : ["", ""], @@ -195,6 +214,7 @@ is ordered. } } -------------------------------------------------- +// CONSOLE There are also built-in "tag" schemas, with currently a single schema called `styled` with the following `pre_tags`: @@ -213,8 +233,11 @@ is an example of switching tag schemas: [source,js] -------------------------------------------------- +GET /_search { - "query" : {...}, + "query" : { + "match": { "user": "kimchy" } + }, "highlight" : { "tags_schema" : "styled", "fields" : { @@ -223,6 +246,7 @@ is an example of switching tag schemas: } } -------------------------------------------------- +// CONSOLE ==== Encoder @@ -240,8 +264,11 @@ For example: [source,js] -------------------------------------------------- +GET /_search { - "query" : {...}, + "query" : { + "match": { "user": "kimchy" } + }, "highlight" : { "fields" : { "content" : {"fragment_size" : 150, "number_of_fragments" : 3} @@ -249,6 +276,7 @@ For example: } } -------------------------------------------------- +// CONSOLE The `fragment_size` is ignored when using the postings highlighter, as it outputs sentences regardless of their length. @@ -258,8 +286,11 @@ to be sorted by score: [source,js] -------------------------------------------------- +GET /_search { - "query" : {...}, + "query" : { + "match": { "user": "kimchy" } + }, "highlight" : { "order" : "score", "fields" : { @@ -268,6 +299,7 @@ to be sorted by score: } } -------------------------------------------------- +// CONSOLE If the `number_of_fragments` value is set to `0` then no fragments are produced; instead the whole content of the field is returned, and of @@ -277,8 +309,11 @@ is required. Note that `fragment_size` is ignored in this case. [source,js] -------------------------------------------------- +GET /_search { - "query" : {...}, + "query" : { + "match": { "user": "kimchy" } + }, "highlight" : { "fields" : { "_all" : {}, @@ -287,6 +322,7 @@ is required. Note that `fragment_size` is ignored in this case. } } -------------------------------------------------- +// CONSOLE When using `fvh` one can use the `fragment_offset` parameter to control the margin to start highlighting from. @@ -302,8 +338,11 @@ greater than `0`. [source,js] -------------------------------------------------- +GET /_search { - "query" : {...}, + "query" : { + "match": { "user": "kimchy" } + }, "highlight" : { "fields" : { "content" : { @@ -315,6 +354,7 @@ greater than `0`. } } -------------------------------------------------- +// CONSOLE ==== Highlight query @@ -330,6 +370,7 @@ Generally it is better to include the search query in the query and the rescore query in `highlight_query`. [source,js] -------------------------------------------------- +GET /_search { "fields": [ "_id" ], "query" : { @@ -385,6 +426,7 @@ query and the rescore query in `highlight_query`.
} } -------------------------------------------------- +// CONSOLE Note that the score of a text fragment in this case is calculated by the Lucene highlighting framework. For implementation details you can check the @@ -400,12 +442,14 @@ at the field level. [source,js] -------------------------------------------------- +GET /_search { - "query" : {...}, + "query" : { + "match": { "user": "kimchy" } + }, "highlight" : { "number_of_fragments" : 3, "fragment_size" : 150, - "tag_schema" : "styled", "fields" : { "_all" : { "pre_tags" : [""], "post_tags" : [""] }, "bio.title" : { "number_of_fragments" : 0 }, @@ -415,6 +459,7 @@ at the field level. } } -------------------------------------------------- +// CONSOLE [[field-match]] ==== Require Field Match @@ -426,14 +471,20 @@ match will be highlighted. [source,js] -------------------------------------------------- +GET /_search { - "query" : {...}, + "query" : { + "match": { "user": "kimchy" } + }, "highlight" : { - "require_field_match": false - "fields" : {...} + "require_field_match": false, + "fields": { + "_all" : { "pre_tags" : [""], "post_tags" : [""] } + } } } -------------------------------------------------- +// CONSOLE [[boundary-characters]] ==== Boundary Characters @@ -462,6 +513,7 @@ analyzer and `content.plain` is analyzed by the `standard` analyzer. [source,js] -------------------------------------------------- +GET /_search { "query": { "query_string": { @@ -480,6 +532,7 @@ analyzer and `content.plain` is analyzed by the `standard` analyzer. } } -------------------------------------------------- +// CONSOLE The above matches both "run with scissors" and "running with scissors" and would highlight "running" and "scissors" but not "run". If both phrases appear in a large document then "running with scissors" is @@ -488,6 +541,7 @@ are more matches in that fragment. [source,js] -------------------------------------------------- +GET /_search { "query": { "query_string": { @@ -506,12 +560,14 @@ are more matches in that fragment. } } -------------------------------------------------- +// CONSOLE The above highlights "run" as well as "running" and "scissors" but still sorts "running with scissors" above "run with scissors" because the plain match ("running") is boosted. [source,js] -------------------------------------------------- +GET /_search { "query": { "query_string": { @@ -530,6 +586,8 @@ the plain match ("running") is boosted. } } -------------------------------------------------- +// CONSOLE + The above query wouldn't highlight "run" or "scissor" but shows that it is just fine not to list the field into which the matches are combined (`content`) in the matched fields. diff --git a/docs/reference/setup/configuration.asciidoc b/docs/reference/setup/configuration.asciidoc index ceb3d8c38d3..68f73fc96b8 100644 --- a/docs/reference/setup/configuration.asciidoc +++ b/docs/reference/setup/configuration.asciidoc @@ -26,7 +26,7 @@ setting, as follows: [source,sh] ------------------------------- -./bin/elasticsearch -E es.path.conf=/path/to/my/config/ +./bin/elasticsearch -Epath.conf=/path/to/my/config/ ------------------------------- [float] @@ -93,15 +93,14 @@ is used in the settings and the process is run as a service or in the background === Setting default settings New default settings may be specified on the command line using the -`es.default.` prefix instead of the `es.` prefix. This will specify a value -that will be used by default unless another value is specified in the config -file. +`default.` prefix.
This will specify a value that will be used by +default unless another value is specified in the config file. For instance, if Elasticsearch is started as follows: [source,sh] --------------------------- -./bin/elasticsearch -E es.default.node.name=My_Node +./bin/elasticsearch -Edefault.node.name=My_Node --------------------------- the value for `node.name` will be `My_Node`, unless it is overwritten on the diff --git a/docs/reference/setup/install/windows.asciidoc b/docs/reference/setup/install/windows.asciidoc index e3c7622d064..0d2e8bf04f6 100644 --- a/docs/reference/setup/install/windows.asciidoc +++ b/docs/reference/setup/install/windows.asciidoc @@ -45,15 +45,14 @@ file by default. The format of this config file is explained in <>. Any settings that can be specified in the config file can also be specified on -the command line, using the `-E` syntax, and prepending `es.` to the setting -name, as follows: +the command line, using the `-E` syntax as follows: [source,sh] -------------------------------------------- -./bin/elasticsearch -E es.cluster.name=my_cluster -E es.node.name=node_1 +./bin/elasticsearch -Ecluster.name=my_cluster -Enode.name=node_1 -------------------------------------------- -NOTE: Values that contain spaces must be surrounded with quotes. For instance `-E es.path.logs="C:\My Logs\logs"`. +NOTE: Values that contain spaces must be surrounded with quotes. For instance `-Epath.logs="C:\My Logs\logs"`. TIP: Typically, any cluster-wide settings (like `cluster.name`) should be added to the `elasticsearch.yml` config file, while any node-specific settings diff --git a/docs/reference/setup/install/zip-targz.asciidoc b/docs/reference/setup/install/zip-targz.asciidoc index 0ed67cb9ce9..7fc41a0f3f8 100644 --- a/docs/reference/setup/install/zip-targz.asciidoc +++ b/docs/reference/setup/install/zip-targz.asciidoc @@ -93,7 +93,7 @@ name, as follows: [source,sh] -------------------------------------------- -./bin/elasticsearch -d -E es.cluster.name=my_cluster -E es.node.name=node_1 +./bin/elasticsearch -d -Ecluster.name=my_cluster -Enode.name=node_1 -------------------------------------------- TIP: Typically, any cluster-wide settings (like `cluster.name`) should be diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionPlugin.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionPlugin.java index 1cb21887944..cd95f13f63a 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionPlugin.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionPlugin.java @@ -21,7 +21,6 @@ package org.elasticsearch.script.expression; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptEngineRegistry; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.script.ScriptModule; public class ExpressionPlugin extends Plugin { @@ -38,6 +37,6 @@ public class ExpressionPlugin extends Plugin { public void onModule(ScriptModule module) { module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExpressionScriptEngineService.class, - ExpressionScriptEngineService.NAME, ScriptMode.ON)); + ExpressionScriptEngineService.NAME, true)); } } diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java index 54553565bcb..edc6a7c3924 100644 --- 
a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java @@ -78,7 +78,7 @@ class ExpressionSearchScript implements SearchScript { } @Override - public Object run() { return new Double(evaluate()); } + public Object run() { return Double.valueOf(evaluate()); } @Override public long runAsLong() { return (long)evaluate(); } diff --git a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java index 700a1aecbc2..5d06196a83b 100644 --- a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java +++ b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java @@ -201,9 +201,9 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri * Return a script object with the given vars from the compiled script object */ @SuppressWarnings("unchecked") - private Script createScript(Object compiledScript, Map vars) throws InstantiationException, IllegalAccessException { + private Script createScript(Object compiledScript, Map vars) throws ReflectiveOperationException { Class scriptClass = (Class) compiledScript; - Script scriptObject = (Script) scriptClass.newInstance(); + Script scriptObject = (Script) scriptClass.getConstructor().newInstance(); Binding binding = new Binding(); binding.getVariables().putAll(vars); scriptObject.setBinding(binding); @@ -218,7 +218,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri allVars.putAll(vars); } return new GroovyScript(compiledScript, createScript(compiledScript.compiled(), allVars), this.logger); - } catch (Exception e) { + } catch (ReflectiveOperationException e) { throw new ScriptException("failed to build executable " + compiledScript, e); } } @@ -238,7 +238,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri Script scriptObject; try { scriptObject = createScript(compiledScript.compiled(), allVars); - } catch (InstantiationException | IllegalAccessException e) { + } catch (ReflectiveOperationException e) { throw new ScriptException("failed to build search " + compiledScript, e); } return new GroovyScript(compiledScript, scriptObject, leafLookup, logger); diff --git a/modules/lang-groovy/src/main/plugin-metadata/plugin-security.policy b/modules/lang-groovy/src/main/plugin-metadata/plugin-security.policy index a2318191540..82482448677 100644 --- a/modules/lang-groovy/src/main/plugin-metadata/plugin-security.policy +++ b/modules/lang-groovy/src/main/plugin-metadata/plugin-security.policy @@ -25,6 +25,7 @@ grant { // needed by groovy engine permission java.lang.RuntimePermission "accessDeclaredMembers"; permission java.lang.RuntimePermission "accessClassInPackage.sun.reflect"; + permission java.lang.RuntimePermission "accessClassInPackage.jdk.internal.reflect"; // needed by GroovyScriptEngineService to close its classloader (why?) 
permission java.lang.RuntimePermission "closeClassLoader"; // Allow executing groovy scripts with codesource of /untrusted @@ -49,6 +50,8 @@ grant { permission org.elasticsearch.script.ClassPermission "org.codehaus.groovy.vmplugin.v7.IndyInterface"; permission org.elasticsearch.script.ClassPermission "sun.reflect.ConstructorAccessorImpl"; permission org.elasticsearch.script.ClassPermission "sun.reflect.MethodAccessorImpl"; + permission org.elasticsearch.script.ClassPermission "jdk.internal.reflect.ConstructorAccessorImpl"; + permission org.elasticsearch.script.ClassPermission "jdk.internal.reflect.MethodAccessorImpl"; permission org.elasticsearch.script.ClassPermission "groovy.lang.Closure"; permission org.elasticsearch.script.ClassPermission "org.codehaus.groovy.runtime.GeneratedClosure"; diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java index 00710dc2b4d..8cecbf2c787 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.messy.tests; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.Base64; import org.elasticsearch.common.Priority; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -45,8 +44,10 @@ import org.elasticsearch.test.ESIntegTestCase; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; +import java.util.Base64; import java.util.Collection; import java.util.Collections; import java.util.HashSet; @@ -413,7 +414,7 @@ public class SearchFieldsTests extends ESIntegTestCase { .field("double_field", 6.0d) .field("date_field", Joda.forPattern("dateOptionalTime").printer().print(new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC))) .field("boolean_field", true) - .field("binary_field", Base64.encodeBytes("testing text".getBytes("UTF8"))) + .field("binary_field", Base64.getEncoder().encodeToString("testing text".getBytes("UTF-8"))) .endObject()).execute().actionGet(); client().admin().indices().prepareRefresh().execute().actionGet(); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java index a468517a512..5ef3ad766e6 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java @@ -21,7 +21,6 @@ package org.elasticsearch.script.mustache; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptEngineRegistry; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.script.ScriptModule; public class MustachePlugin extends Plugin { @@ -38,6 +37,6 @@ public class MustachePlugin extends Plugin { public void onModule(ScriptModule module) { module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(MustacheScriptEngineService.class, - MustacheScriptEngineService.NAME, ScriptMode.ON)); + MustacheScriptEngineService.NAME, true)); } } diff --git 
a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java index 139d8dbf0b8..7097718c6fc 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java @@ -59,7 +59,6 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.ScriptEngineRegistry; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.mustache.MustacheScriptEngineService; @@ -107,7 +106,7 @@ public class TemplateQueryParserTests extends ESTestCase { ScriptModule scriptModule = new ScriptModule(); scriptModule.prepareSettings(settingsModule); // TODO: make this use a mock engine instead of mustache and it will no longer be messy! - scriptModule.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(MustacheScriptEngineService.class, MustacheScriptEngineService.NAME, ScriptMode.ON)); + scriptModule.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(MustacheScriptEngineService.class, MustacheScriptEngineService.NAME, true)); settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); injector = new ModulesBuilder().add( new EnvironmentModule(new Environment(settings)), @@ -153,7 +152,7 @@ public class TemplateQueryParserTests extends ESTestCase { }); IndicesQueriesRegistry indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class); contextFactory = () -> new QueryShardContext(idxSettings, bitsetFilterCache, indexFieldDataService, mapperService, - similarityService, scriptService, indicesQueriesRegistry, proxy, null, null, null); + similarityService, scriptService, indicesQueriesRegistry, proxy, null, null); } @Override diff --git a/modules/lang-painless/ant.xml b/modules/lang-painless/ant.xml index 1d2bdb678e8..9d3cf9ece91 100644 --- a/modules/lang-painless/ant.xml +++ b/modules/lang-painless/ant.xml @@ -42,10 +42,16 @@ + + + + + + @@ -70,6 +76,7 @@ + @@ -92,6 +99,7 @@ + @@ -121,9 +129,9 @@ - - - + + + diff --git a/modules/lang-painless/src/main/antlr/PainlessLexer.g4 b/modules/lang-painless/src/main/antlr/PainlessLexer.g4 index 97c79845043..2588a646dcb 100644 --- a/modules/lang-painless/src/main/antlr/PainlessLexer.g4 +++ b/modules/lang-painless/src/main/antlr/PainlessLexer.g4 @@ -19,6 +19,10 @@ lexer grammar PainlessLexer; +@header { +import org.elasticsearch.painless.Definition; +} + WS: [ \t\n\r]+ -> skip; COMMENT: ( '//' .*? [\n\r] | '/*' .*? '*/' ) -> skip; @@ -28,7 +32,7 @@ LBRACE: '['; RBRACE: ']'; LP: '('; RP: ')'; -DOT: '.' -> mode(EXT); +DOT: '.' -> mode(AFTER_DOT); COMMA: ','; SEMICOLON: ';'; IF: 'if'; @@ -88,7 +92,7 @@ AUSH: '>>>='; OCTAL: '0' [0-7]+ [lL]?; HEX: '0' [xX] [0-9a-fA-F]+ [lL]?; INTEGER: ( '0' | [1-9] [0-9]* ) [lLfFdD]?; -DECIMAL: ( '0' | [1-9] [0-9]* ) DOT [0-9]* ( [eE] [+\-]? [0-9]+ )? [fF]?; +DECIMAL: ( '0' | [1-9] [0-9]* ) (DOT [0-9]+)? ( [eE] [+\-]? [0-9]+ )? [fF]?; STRING: ( '"' ( '\\"' | '\\\\' | ~[\\"] )*? '"' ) | ( '\'' ( '\\\'' | '\\\\' | ~[\\"] )*? 
'\'' ); @@ -97,8 +101,16 @@ FALSE: 'false'; NULL: 'null'; +// The predicate here allows us to remove ambiguities when +// dealing with types versus identifiers. We check against +// the current whitelist to determine whether a token is a type +// or not. Note this works by processing one character at a time +// and the rule is added or removed as this happens. This is also known +// as "the lexer hack." See (https://en.wikipedia.org/wiki/The_lexer_hack). +TYPE: ID ( DOT ID )* { Definition.isSimpleType(getText()) }?; ID: [_a-zA-Z] [_a-zA-Z0-9]*; -mode EXT; -EXTINTEGER: ( '0' | [1-9] [0-9]* ) -> mode(DEFAULT_MODE); -EXTID: [_a-zA-Z] [_a-zA-Z0-9]* -> mode(DEFAULT_MODE); +mode AFTER_DOT; + +DOTINTEGER: ( '0' | [1-9] [0-9]* ) -> mode(DEFAULT_MODE); +DOTID: [_a-z] [_a-zA-Z0-9]* -> mode(DEFAULT_MODE); diff --git a/modules/lang-painless/src/main/antlr/PainlessLexer.tokens b/modules/lang-painless/src/main/antlr/PainlessLexer.tokens new file mode 100644 index 00000000000..d42e157e964 --- /dev/null +++ b/modules/lang-painless/src/main/antlr/PainlessLexer.tokens @@ -0,0 +1,137 @@ +WS=1 +COMMENT=2 +LBRACK=3 +RBRACK=4 +LBRACE=5 +RBRACE=6 +LP=7 +RP=8 +DOT=9 +COMMA=10 +SEMICOLON=11 +IF=12 +ELSE=13 +WHILE=14 +DO=15 +FOR=16 +CONTINUE=17 +BREAK=18 +RETURN=19 +NEW=20 +TRY=21 +CATCH=22 +THROW=23 +BOOLNOT=24 +BWNOT=25 +MUL=26 +DIV=27 +REM=28 +ADD=29 +SUB=30 +LSH=31 +RSH=32 +USH=33 +LT=34 +LTE=35 +GT=36 +GTE=37 +EQ=38 +EQR=39 +NE=40 +NER=41 +BWAND=42 +XOR=43 +BWOR=44 +BOOLAND=45 +BOOLOR=46 +COND=47 +COLON=48 +INCR=49 +DECR=50 +ASSIGN=51 +AADD=52 +ASUB=53 +AMUL=54 +ADIV=55 +AREM=56 +AAND=57 +AXOR=58 +AOR=59 +ALSH=60 +ARSH=61 +AUSH=62 +OCTAL=63 +HEX=64 +INTEGER=65 +DECIMAL=66 +STRING=67 +TRUE=68 +FALSE=69 +NULL=70 +TYPE=71 +ID=72 +DOTINTEGER=73 +DOTID=74 +'{'=3 +'}'=4 +'['=5 +']'=6 +'('=7 +')'=8 +'.'=9 +','=10 +';'=11 +'if'=12 +'else'=13 +'while'=14 +'do'=15 +'for'=16 +'continue'=17 +'break'=18 +'return'=19 +'new'=20 +'try'=21 +'catch'=22 +'throw'=23 +'!'=24 +'~'=25 +'*'=26 +'/'=27 +'%'=28 +'+'=29 +'-'=30 +'<<'=31 +'>>'=32 +'>>>'=33 +'<'=34 +'<='=35 +'>'=36 +'>='=37 +'=='=38 +'==='=39 +'!='=40 +'!=='=41 +'&'=42 +'^'=43 +'|'=44 +'&&'=45 +'||'=46 +'?'=47 +':'=48 +'++'=49 +'--'=50 +'='=51 +'+='=52 +'-='=53 +'*='=54 +'/='=55 +'%='=56 +'&='=57 +'^='=58 +'|='=59 +'<<='=60 +'>>='=61 +'>>>='=62 +'true'=68 +'false'=69 +'null'=70 diff --git a/modules/lang-painless/src/main/antlr/PainlessParser.g4 b/modules/lang-painless/src/main/antlr/PainlessParser.g4 index b36dec5afbb..12de27cefd3 100644 --- a/modules/lang-painless/src/main/antlr/PainlessParser.g4 +++ b/modules/lang-painless/src/main/antlr/PainlessParser.g4 @@ -22,35 +22,37 @@ parser grammar PainlessParser; options { tokenVocab=PainlessLexer; } source - : statement+ EOF + : statement* EOF ; +// Note we use a predicate on the if/else case here to prevent the +// "dangling-else" ambiguity by forcing the 'else' token to be consumed +// as soon as one is found. See (https://en.wikipedia.org/wiki/Dangling_else). statement - : IF LP expression RP block ( ELSE block )? # if - | WHILE LP expression RP ( block | empty ) # while - | DO block WHILE LP expression RP ( SEMICOLON | EOF ) # do - | FOR LP initializer? SEMICOLON expression? SEMICOLON afterthought? 
RP ( block | empty ) # for - | declaration ( SEMICOLON | EOF ) # decl - | CONTINUE ( SEMICOLON | EOF ) # continue - | BREAK ( SEMICOLON | EOF ) # break - | RETURN expression ( SEMICOLON | EOF ) # return - | TRY block trap+ # try - | THROW expression ( SEMICOLON | EOF ) # throw - | expression ( SEMICOLON | EOF ) # expr + : IF LP expression RP trailer ( ELSE trailer | { _input.LA(1) != ELSE }? ) # if + | WHILE LP expression RP ( trailer | empty ) # while + | DO block WHILE LP expression RP delimiter # do + | FOR LP initializer? SEMICOLON expression? SEMICOLON afterthought? RP ( trailer | empty ) # for + | declaration delimiter # decl + | CONTINUE delimiter # continue + | BREAK delimiter # break + | RETURN expression delimiter # return + | TRY block trap+ # try + | THROW expression delimiter # throw + | expression delimiter # expr + ; + +trailer + : block + | statement ; block - : LBRACK statement+ RBRACK # multiple - | statement # single + : LBRACK statement* RBRACK ; empty - : emptyscope - | SEMICOLON - ; - -emptyscope - : LBRACK RBRACK + : SEMICOLON ; initializer @@ -63,75 +65,96 @@ afterthought ; declaration - : decltype declvar ( COMMA declvar )* + : decltype declvar (COMMA declvar)* ; decltype - : identifier (LBRACE RBRACE)* + : TYPE (LBRACE RBRACE)* ; declvar - : identifier ( ASSIGN expression )? + : ID ( ASSIGN expression )? ; trap - : CATCH LP ( identifier identifier ) RP ( block | emptyscope ) + : CATCH LP TYPE ID RP block ; -identifier - : ID generic? +delimiter + : SEMICOLON + | EOF ; -generic - : LT identifier ( COMMA identifier )* GT +// Note we return the boolean s. This is returned as true +// if secondaries (postfixes) are allowed, otherwise, false. +// This prevents illegal secondaries from being appended to +// expressions using precedence that aren't variable/method chains. +expression returns [boolean s = true] + : u = unary[false] { $s = $u.s; } # single + | expression ( MUL | DIV | REM ) expression { $s = false; } # binary + | expression ( ADD | SUB ) expression { $s = false; } # binary + | expression ( LSH | RSH | USH ) expression { $s = false; } # binary + | expression ( LT | LTE | GT | GTE ) expression { $s = false; } # comp + | expression ( EQ | EQR | NE | NER ) expression { $s = false; } # comp + | expression BWAND expression { $s = false; } # binary + | expression XOR expression { $s = false; } # binary + | expression BWOR expression { $s = false; } # binary + | expression BOOLAND expression { $s = false; } # bool + | expression BOOLOR expression { $s = false; } # bool + | expression COND e0 = expression COLON e1 = expression { $s = $e0.s && $e1.s; } # conditional + // TODO: Should we allow crazy syntax like (x = 5).call()? + // Other crazy syntaxes work, but this one requires + // a complete restructure of the rules as EChain isn't + // designed to handle more postfixes after an assignment. 
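+ // Concretely, the assignment alternative just below sets s = false, so a + // postfix (secondary) can never be chained onto an assignment expression.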
+ | chain[true] ( ASSIGN | AADD | ASUB | AMUL | + ADIV | AREM | AAND | AXOR | + AOR | ALSH | ARSH | AUSH ) expression { $s = false; } # assignment ; -expression - : LP expression RP # precedence - | ( OCTAL | HEX | INTEGER | DECIMAL ) # numeric - | TRUE # true - | FALSE # false - | NULL # null - | chain ( INCR | DECR ) # postinc - | ( INCR | DECR ) chain # preinc - | chain # read - | ( BOOLNOT | BWNOT | ADD | SUB ) expression # unary - | LP decltype RP expression # cast - | expression ( MUL | DIV | REM ) expression # binary - | expression ( ADD | SUB ) expression # binary - | expression ( LSH | RSH | USH ) expression # binary - | expression ( LT | LTE | GT | GTE ) expression # comp - | expression ( EQ | EQR | NE | NER ) expression # comp - | expression BWAND expression # binary - | expression XOR expression # binary - | expression BWOR expression # binary - | expression BOOLAND expression # bool - | expression BOOLOR expression # bool - | expression COND expression COLON expression # conditional - | chain ( ASSIGN | AADD | ASUB | AMUL | ADIV - | AREM | AAND | AXOR | AOR - | ALSH | ARSH | AUSH ) expression # assignment +// Note we take in the boolean c. This is used to indicate +// whether or not this rule was called when we are already +// processing a variable/method chain. This prevents the chain +// from being applied to rules where it wouldn't be allowed. +unary[boolean c] returns [boolean s = true] + : { !$c }? ( INCR | DECR ) chain[true] # pre + | { !$c }? chain[true] (INCR | DECR ) # post + | { !$c }? chain[false] # read + | { !$c }? ( OCTAL | HEX | INTEGER | DECIMAL ) { $s = false; } # numeric + | { !$c }? TRUE { $s = false; } # true + | { !$c }? FALSE { $s = false; } # false + | { !$c }? NULL { $s = false; } # null + | { !$c }? ( BOOLNOT | BWNOT | ADD | SUB ) unary[false] # operator + | LP decltype RP unary[$c] # cast ; -chain - : linkprec - | linkcast - | linkvar - | linknew - | linkstring +chain[boolean c] + : p = primary[$c] secondary[$p.s]* # dynamic + | decltype dot secondary[true]* # static + | NEW TYPE (LBRACE expression RBRACE)+ (dot secondary[true]*)? # newarray ; -linkprec: LP ( linkprec | linkcast | linkvar | linknew | linkstring ) RP ( linkdot | linkbrace )?; -linkcast: LP decltype RP ( linkprec | linkcast | linkvar | linknew | linkstring ); -linkbrace: LBRACE expression RBRACE ( linkdot | linkbrace )?; -linkdot: DOT ( linkcall | linkfield ); -linkcall: EXTID arguments ( linkdot | linkbrace )?; -linkvar: identifier ( linkdot | linkbrace )?; -linkfield: ( EXTID | EXTINTEGER ) ( linkdot | linkbrace )?; -linknew: NEW identifier ( ( arguments linkdot? ) | ( ( LBRACE expression RBRACE )+ linkdot? ) ); -linkstring: STRING (linkdot | linkbrace )?; +primary[boolean c] returns [boolean s = true] + : { !$c }? LP e = expression RP { $s = $e.s; } # exprprec + | { $c }? LP unary[true] RP # chainprec + | STRING # string + | ID # variable + | NEW TYPE arguments # newobject + ; + +secondary[boolean s] + : { $s }? dot + | { $s }? brace + ; + +dot + : DOT DOTID arguments # callinvoke + | DOT ( DOTID | DOTINTEGER ) # fieldaccess + ; + +brace + : LBRACE expression RBRACE # braceaccess + ; arguments : ( LP ( expression ( COMMA expression )* )? 
RP ) ; - diff --git a/modules/lang-painless/src/main/antlr/PainlessParser.tokens b/modules/lang-painless/src/main/antlr/PainlessParser.tokens new file mode 100644 index 00000000000..d42e157e964 --- /dev/null +++ b/modules/lang-painless/src/main/antlr/PainlessParser.tokens @@ -0,0 +1,137 @@ +WS=1 +COMMENT=2 +LBRACK=3 +RBRACK=4 +LBRACE=5 +RBRACE=6 +LP=7 +RP=8 +DOT=9 +COMMA=10 +SEMICOLON=11 +IF=12 +ELSE=13 +WHILE=14 +DO=15 +FOR=16 +CONTINUE=17 +BREAK=18 +RETURN=19 +NEW=20 +TRY=21 +CATCH=22 +THROW=23 +BOOLNOT=24 +BWNOT=25 +MUL=26 +DIV=27 +REM=28 +ADD=29 +SUB=30 +LSH=31 +RSH=32 +USH=33 +LT=34 +LTE=35 +GT=36 +GTE=37 +EQ=38 +EQR=39 +NE=40 +NER=41 +BWAND=42 +XOR=43 +BWOR=44 +BOOLAND=45 +BOOLOR=46 +COND=47 +COLON=48 +INCR=49 +DECR=50 +ASSIGN=51 +AADD=52 +ASUB=53 +AMUL=54 +ADIV=55 +AREM=56 +AAND=57 +AXOR=58 +AOR=59 +ALSH=60 +ARSH=61 +AUSH=62 +OCTAL=63 +HEX=64 +INTEGER=65 +DECIMAL=66 +STRING=67 +TRUE=68 +FALSE=69 +NULL=70 +TYPE=71 +ID=72 +DOTINTEGER=73 +DOTID=74 +'{'=3 +'}'=4 +'['=5 +']'=6 +'('=7 +')'=8 +'.'=9 +','=10 +';'=11 +'if'=12 +'else'=13 +'while'=14 +'do'=15 +'for'=16 +'continue'=17 +'break'=18 +'return'=19 +'new'=20 +'try'=21 +'catch'=22 +'throw'=23 +'!'=24 +'~'=25 +'*'=26 +'/'=27 +'%'=28 +'+'=29 +'-'=30 +'<<'=31 +'>>'=32 +'>>>'=33 +'<'=34 +'<='=35 +'>'=36 +'>='=37 +'=='=38 +'==='=39 +'!='=40 +'!=='=41 +'&'=42 +'^'=43 +'|'=44 +'&&'=45 +'||'=46 +'?'=47 +':'=48 +'++'=49 +'--'=50 +'='=51 +'+='=52 +'-='=53 +'*='=54 +'/='=55 +'%='=56 +'&='=57 +'^='=58 +'|='=59 +'<<='=60 +'>>='=61 +'>>>='=62 +'true'=68 +'false'=69 +'null'=70 diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java index f215e39ad01..a06bcdf9840 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Analyzer.java @@ -26,10 +26,9 @@ import org.elasticsearch.painless.node.SSource; * Runs the analysis phase of compilation using the Painless AST. */ final class Analyzer { - static Variables analyze(final CompilerSettings settings, final Definition definition, - final Reserved shortcut, final SSource root) { - final Variables variables = new Variables(settings, definition, shortcut); - root.analyze(settings, definition, variables); + static Variables analyze(Reserved shortcut, SSource root) { + Variables variables = new Variables(shortcut); + root.analyze(variables); return variables; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java index 3228ff47e92..a16a9818ab3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java @@ -20,223 +20,770 @@ package org.elasticsearch.painless; import org.elasticsearch.painless.Definition.Cast; -import org.elasticsearch.painless.Definition.Method; import org.elasticsearch.painless.Definition.Sort; -import org.elasticsearch.painless.Definition.Transform; import org.elasticsearch.painless.Definition.Type; -import java.lang.reflect.InvocationTargetException; - /** * Used during the analysis phase to collect legal type casts and promotions * for type-checking and later to write necessary casts in the bytecode. 
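* For example, {@code promoteNumeric} below resolves an {@code int} operand and a * {@code long} operand to {@code long}, while {@code getLegalCast} returns {@code null} * when the actual and expected types already match, since no cast is needed.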
*/ public final class AnalyzerCaster { - public static Cast getLegalCast(final Definition definition, - final String location, final Type actual, final Type expected, final boolean explicit) { - final Cast cast = new Cast(actual, expected, explicit); - + public static Cast getLegalCast(String location, Type actual, Type expected, boolean explicit, boolean internal) { if (actual.equals(expected)) { return null; } - Cast transform = definition.transformsMap.get(cast); + switch (actual.sort) { + case BOOL: + switch (expected.sort) { + case DEF: + return new Cast(actual, Definition.DEF_TYPE, explicit, false, false, true, false); + case OBJECT: + case BOOL_OBJ: + if (internal) + return new Cast(actual, actual, explicit, false, false, false, true); + } - if (transform == null && explicit) { - transform = definition.transformsMap.get(new Cast(actual, expected, false)); + break; + case BYTE: + switch (expected.sort) { + case SHORT: + case INT: + case LONG: + case FLOAT: + case DOUBLE: + return new Cast(actual, expected, explicit); + case CHAR: + if (explicit) + return new Cast(actual, expected, true); + + break; + case DEF: + return new Cast(actual, Definition.DEF_TYPE, explicit, false, false, true, false); + case OBJECT: + case NUMBER: + case BYTE_OBJ: + if (internal) + return new Cast(actual, actual, explicit, false, false, false, true); + + break; + case SHORT_OBJ: + if (internal) + return new Cast(actual,Definition.SHORT_TYPE, explicit, false, false, false, true); + + break; + case INT_OBJ: + if (internal) + return new Cast(actual, Definition.INT_TYPE, explicit, false, false, false, true); + + break; + case LONG_OBJ: + if (internal) + return new Cast(actual, Definition.LONG_TYPE, explicit, false, false, false, true); + + break; + case FLOAT_OBJ: + if (internal) + return new Cast(actual, Definition.FLOAT_TYPE, explicit, false, false, false, true); + + break; + case DOUBLE_OBJ: + if (internal) + return new Cast(actual, Definition.DOUBLE_TYPE, explicit, false, false, false, true); + + break; + case CHAR_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.CHAR_TYPE, explicit, false, false, false, true); + + break; + } + + break; + case SHORT: + switch (expected.sort) { + case INT: + case LONG: + case FLOAT: + case DOUBLE: + return new Cast(actual, expected, explicit); + case BYTE: + case CHAR: + if (explicit) + return new Cast(actual, expected, true); + + break; + case DEF: + return new Cast(actual, Definition.DEF_TYPE, explicit, false, false, true, false); + case OBJECT: + case NUMBER: + case SHORT_OBJ: + if (internal) + return new Cast(actual, actual, explicit, false, false, false, true); + + break; + case INT_OBJ: + if (internal) + return new Cast(actual, Definition.INT_TYPE, explicit, false, false, false, true); + + break; + case LONG_OBJ: + if (internal) + return new Cast(actual, Definition.LONG_TYPE, explicit, false, false, false, true); + + break; + case FLOAT_OBJ: + if (internal) + return new Cast(actual, Definition.FLOAT_TYPE, explicit, false, false, false, true); + + break; + case DOUBLE_OBJ: + if (internal) + return new Cast(actual, Definition.DOUBLE_TYPE, explicit, false, false, false, true); + + break; + case BYTE_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.BYTE_TYPE, true, false, false, false, true); + + break; + case CHAR_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.CHAR_TYPE, true, false, false, false, true); + + break; + } + + break; + case CHAR: + switch (expected.sort) { + case INT: + case LONG: + case FLOAT: + 
case DOUBLE: + return new Cast(actual, expected, explicit); + case BYTE: + case SHORT: + if (explicit) + return new Cast(actual, expected, true); + + break; + case DEF: + return new Cast(actual, Definition.DEF_TYPE, explicit, false, false, true, false); + case OBJECT: + case NUMBER: + case CHAR_OBJ: + if (internal) + return new Cast(actual, actual, explicit, false, false, false, true); + + break; + case STRING: + return new Cast(actual, Definition.STRING_TYPE, explicit, false, false, false, false); + case INT_OBJ: + if (internal) + return new Cast(actual, Definition.INT_TYPE, explicit, false, false, false, true); + + break; + case LONG_OBJ: + if (internal) + return new Cast(actual, Definition.LONG_TYPE, explicit, false, false, false, true); + + break; + case FLOAT_OBJ: + if (internal) + return new Cast(actual, Definition.FLOAT_TYPE, explicit, false, false, false, true); + + break; + case DOUBLE_OBJ: + if (internal) + return new Cast(actual, Definition.DOUBLE_TYPE, explicit, false, false, false, true); + + break; + case BYTE_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.BYTE_TYPE, true, false, false, false, true); + + break; + case SHORT_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.SHORT_TYPE, true, false, false, false, true); + + break; + } + + break; + case INT: + switch (expected.sort) { + case LONG: + case FLOAT: + case DOUBLE: + return new Cast(actual, expected, explicit); + case BYTE: + case SHORT: + case CHAR: + if (explicit) + return new Cast(actual, expected, true); + + break; + case DEF: + return new Cast(actual, Definition.DEF_TYPE, explicit, false, false, true, false); + case OBJECT: + case NUMBER: + case INT_OBJ: + if (internal) + return new Cast(actual, actual, explicit, false, false, false, true); + + break; + case LONG_OBJ: + if (internal) + return new Cast(actual, Definition.LONG_TYPE, explicit, false, false, false, true); + + break; + case FLOAT_OBJ: + if (internal) + return new Cast(actual, Definition.FLOAT_TYPE, explicit, false, false, false, true); + + break; + case DOUBLE_OBJ: + if (internal) + return new Cast(actual, Definition.DOUBLE_TYPE, explicit, false, false, false, true); + + break; + case BYTE_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.BYTE_TYPE, true, false, false, false, true); + + break; + case SHORT_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.SHORT_TYPE, true, false, false, false, true); + + break; + case CHAR_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.CHAR_TYPE, true, false, false, false, true); + + break; + } + + break; + case LONG: + switch (expected.sort) { + case FLOAT: + case DOUBLE: + return new Cast(actual, expected, explicit); + case BYTE: + case SHORT: + case CHAR: + case INT: + if (explicit) + return new Cast(actual, expected, true); + + break; + case DEF: + return new Cast(actual, Definition.DEF_TYPE, explicit, false, false, true, false); + case OBJECT: + case NUMBER: + case LONG_OBJ: + if (internal) + return new Cast(actual, actual, explicit, false, false, false, true); + + break; + case FLOAT_OBJ: + if (internal) + return new Cast(actual, Definition.FLOAT_TYPE, explicit, false, false, false, true); + + break; + case DOUBLE_OBJ: + if (internal) + return new Cast(actual, Definition.DOUBLE_TYPE, explicit, false, false, false, true); + + break; + case BYTE_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.BYTE_TYPE, true, false, false, false, true); + + break; + case SHORT_OBJ: + if (explicit && 
internal) + return new Cast(actual, Definition.SHORT_TYPE, true, false, false, false, true); + + break; + case CHAR_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.CHAR_TYPE, true, false, false, false, true); + + break; + case INT_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.INT_TYPE, true, false, false, false, true); + + break; + } + + break; + case FLOAT: + switch (expected.sort) { + case DOUBLE: + return new Cast(actual, expected, explicit); + case BYTE: + case SHORT: + case CHAR: + case INT: + case FLOAT: + if (explicit) + return new Cast(actual, expected, true); + + break; + case DEF: + return new Cast(actual, Definition.DEF_TYPE, explicit, false, false, true, false); + case OBJECT: + case NUMBER: + case FLOAT_OBJ: + if (internal) + return new Cast(actual, actual, explicit, false, false, false, true); + + break; + case DOUBLE_OBJ: + if (internal) + return new Cast(actual, Definition.DOUBLE_TYPE, explicit, false, false, false, true); + + break; + case BYTE_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.BYTE_TYPE, true, false, false, false, true); + + break; + case SHORT_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.SHORT_TYPE, true, false, false, false, true); + + break; + case CHAR_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.CHAR_TYPE, true, false, false, false, true); + + break; + case INT_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.INT_TYPE, true, false, false, false, true); + + break; + case LONG_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.LONG_TYPE, true, false, false, false, true); + + break; + } + + break; + case DOUBLE: + switch (expected.sort) { + case BYTE: + case SHORT: + case CHAR: + case INT: + case FLOAT: + if (explicit) + return new Cast(actual, expected, true); + + break; + case DEF: + return new Cast(actual, Definition.DEF_TYPE, explicit, false, false, true, false); + case OBJECT: + case NUMBER: + case DOUBLE_OBJ: + if (internal) + return new Cast(actual, actual, explicit, false, false, false, true); + + break; + case BYTE_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.BYTE_TYPE, true, false, false, false, true); + + break; + case SHORT_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.SHORT_TYPE, true, false, false, false, true); + + break; + case CHAR_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.CHAR_TYPE, true, false, false, false, true); + + break; + case INT_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.INT_TYPE, true, false, false, false, true); + + break; + case LONG_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.LONG_TYPE, true, false, false, false, true); + + break; + case FLOAT_OBJ: + if (explicit && internal) + return new Cast(actual, Definition.FLOAT_TYPE, true, false, false, false, true); + + break; + } + + break; + case OBJECT: + case NUMBER: + switch (expected.sort) { + case BYTE: + if (internal && explicit) + return new Cast(actual, Definition.BYTE_OBJ_TYPE, true, false, true, false, false); + + break; + case SHORT: + if (internal && explicit) + return new Cast(actual, Definition.SHORT_OBJ_TYPE, true, false, true, false, false); + + break; + case CHAR: + if (internal && explicit) + return new Cast(actual, Definition.CHAR_OBJ_TYPE, true, false, true, false, false); + + break; + case INT: + if (internal && explicit) + return new Cast(actual, Definition.INT_OBJ_TYPE, true, 
false, true, false, false); + + break; + case LONG: + if (internal && explicit) + return new Cast(actual, Definition.LONG_OBJ_TYPE, true, false, true, false, false); + + break; + case FLOAT: + if (internal && explicit) + return new Cast(actual, Definition.FLOAT_OBJ_TYPE, true, false, true, false, false); + + break; + case DOUBLE: + if (internal && explicit) + return new Cast(actual, Definition.DOUBLE_OBJ_TYPE, true, false, true, false, false); + + break; + } + + break; + case BOOL_OBJ: + switch (expected.sort) { + case BOOL: + if (internal) + return new Cast(actual, expected, explicit, true, false, false, false); + + break; + } + + break; + case BYTE_OBJ: + switch (expected.sort) { + case BYTE: + case SHORT: + case INT: + case LONG: + case FLOAT: + case DOUBLE: + if (internal) + return new Cast(actual, expected, explicit, true, false, false, false); + + break; + case CHAR: + if (internal && explicit) + return new Cast(actual, expected, true, true, false, false, false); + + break; + } + + break; + case SHORT_OBJ: + switch (expected.sort) { + case SHORT: + case INT: + case LONG: + case FLOAT: + case DOUBLE: + if (internal) + return new Cast(actual, expected, explicit, true, false, false, false); + + break; + case BYTE: + case CHAR: + if (internal && explicit) + return new Cast(actual, expected, true, true, false, false, false); + + break; + } + + break; + case CHAR_OBJ: + switch (expected.sort) { + case CHAR: + case INT: + case LONG: + case FLOAT: + case DOUBLE: + if (internal) + return new Cast(actual, expected, explicit, true, false, false, false); + + break; + case BYTE: + case SHORT: + if (internal && explicit) + return new Cast(actual, expected, true, true, false, false, false); + + break; + } + + break; + case INT_OBJ: + switch (expected.sort) { + case INT: + case LONG: + case FLOAT: + case DOUBLE: + if (internal) + return new Cast(actual, expected, explicit, true, false, false, false); + + break; + case BYTE: + case SHORT: + case CHAR: + if (internal && explicit) + return new Cast(actual, expected, true, true, false, false, false); + + break; + } + + break; + case LONG_OBJ: + switch (expected.sort) { + case LONG: + case FLOAT: + case DOUBLE: + if (internal) + return new Cast(actual, expected, explicit, true, false, false, false); + + break; + case BYTE: + case SHORT: + case CHAR: + case INT: + if (internal && explicit) + return new Cast(actual, expected, true, true, false, false, false); + + break; + } + + break; + case FLOAT_OBJ: + switch (expected.sort) { + case FLOAT: + case DOUBLE: + if (internal) + return new Cast(actual, expected, explicit, true, false, false, false); + + break; + case BYTE: + case SHORT: + case CHAR: + case INT: + case LONG: + if (internal && explicit) + return new Cast(actual, expected, true, true, false, false, false); + + break; + } + + break; + case DOUBLE_OBJ: + switch (expected.sort) { + case FLOAT: + case DOUBLE: + if (internal) + return new Cast(actual, expected, explicit, true, false, false, false); + + break; + case BYTE: + case SHORT: + case CHAR: + case INT: + case LONG: + if (internal && explicit) + return new Cast(actual, expected, true, true, false, false, false); + + break; + } + + break; + case DEF: + switch (expected.sort) { + case BOOL: + case BYTE: + case SHORT: + case CHAR: + case INT: + case LONG: + case FLOAT: + case DOUBLE: + return new Cast(actual, expected, explicit, true, false, false, false); + } + + break; + case STRING: + switch (expected.sort) { + case CHAR: + if (explicit) + return new Cast(actual, expected, true, false, false, 
false, false); + + break; + } + + break; } - if (transform != null) { - return transform; - } - - if (expected.clazz.isAssignableFrom(actual.clazz) || - ((explicit || expected.sort == Sort.DEF) && actual.clazz.isAssignableFrom(expected.clazz))) { - return cast; + if (actual.sort == Sort.DEF || expected.sort == Sort.DEF || + expected.clazz.isAssignableFrom(actual.clazz) || + explicit && actual.clazz.isAssignableFrom(expected.clazz)) { + return new Cast(actual, expected, explicit); } else { throw new ClassCastException("Error" + location + ": Cannot cast from [" + actual.name + "] to [" + expected.name + "]."); } } public static Object constCast(final String location, final Object constant, final Cast cast) { - if (cast instanceof Transform) { - final Transform transform = (Transform)cast; - return invokeTransform(location, transform, constant); + final Sort fsort = cast.from.sort; + final Sort tsort = cast.to.sort; + + if (fsort == tsort) { + return constant; + } else if (fsort == Sort.STRING && tsort == Sort.CHAR) { + return Utility.StringTochar((String)constant); + } else if (fsort == Sort.CHAR && tsort == Sort.STRING) { + return Utility.charToString((char)constant); + } else if (fsort.numeric && tsort.numeric) { + final Number number; + + if (fsort == Sort.CHAR) { + number = (int)(char)constant; + } else { + number = (Number)constant; + } + + switch (tsort) { + case BYTE: return number.byteValue(); + case SHORT: return number.shortValue(); + case CHAR: return (char)number.intValue(); + case INT: return number.intValue(); + case LONG: return number.longValue(); + case FLOAT: return number.floatValue(); + case DOUBLE: return number.doubleValue(); + default: + throw new IllegalStateException("Error" + location + ": Cannot cast from " + + "[" + cast.from.clazz.getCanonicalName() + "] to [" + cast.to.clazz.getCanonicalName() + "]."); + } } else { - final Sort fsort = cast.from.sort; - final Sort tsort = cast.to.sort; - - if (fsort == tsort) { - return constant; - } else if (fsort.numeric && tsort.numeric) { - Number number; - - if (fsort == Sort.CHAR) { - number = (int)(char)constant; - } else { - number = (Number)constant; - } - - switch (tsort) { - case BYTE: return number.byteValue(); - case SHORT: return number.shortValue(); - case CHAR: return (char)number.intValue(); - case INT: return number.intValue(); - case LONG: return number.longValue(); - case FLOAT: return number.floatValue(); - case DOUBLE: return number.doubleValue(); - default: - throw new IllegalStateException("Error" + location + ": Cannot cast from " + - "[" + cast.from.clazz.getCanonicalName() + "] to [" + cast.to.clazz.getCanonicalName() + "]."); - } - } else { - throw new IllegalStateException("Error" + location + ": Cannot cast from " + - "[" + cast.from.clazz.getCanonicalName() + "] to [" + cast.to.clazz.getCanonicalName() + "]."); - } + throw new IllegalStateException("Error" + location + ": Cannot cast from " + + "[" + cast.from.clazz.getCanonicalName() + "] to [" + cast.to.clazz.getCanonicalName() + "]."); } } - private static Object invokeTransform(final String location, final Transform transform, final Object object) { - final Method method = transform.method; - final java.lang.reflect.Method jmethod = method.reflect; - final int modifiers = jmethod.getModifiers(); - - try { - if (java.lang.reflect.Modifier.isStatic(modifiers)) { - return jmethod.invoke(null, object); - } else { - return jmethod.invoke(object); - } - } catch (final IllegalAccessException | IllegalArgumentException | - InvocationTargetException | 
NullPointerException | ExceptionInInitializerError exception) { - throw new ClassCastException( - "Error" + location + ": Cannot cast from [" + transform.from.name + "] to [" + transform.to.name + "]."); - } - } - - public static Type promoteNumeric(final Definition definition, final Type from, final boolean decimal, final boolean primitive) { + public static Type promoteNumeric(Type from, boolean decimal) { final Sort sort = from.sort; if (sort == Sort.DEF) { - return definition.defType; - } else if ((sort == Sort.DOUBLE || sort == Sort.DOUBLE_OBJ) && decimal) { - return primitive ? definition.doubleType : definition.doubleobjType; - } else if ((sort == Sort.FLOAT || sort == Sort.FLOAT_OBJ) && decimal) { - return primitive ? definition.floatType : definition.floatobjType; - } else if (sort == Sort.LONG || sort == Sort.LONG_OBJ) { - return primitive ? definition.longType : definition.longobjType; - } else if (sort == Sort.INT || sort == Sort.INT_OBJ || - sort == Sort.CHAR || sort == Sort.CHAR_OBJ || - sort == Sort.SHORT || sort == Sort.SHORT_OBJ || - sort == Sort.BYTE || sort == Sort.BYTE_OBJ) { - return primitive ? definition.intType : definition.intobjType; + return Definition.DEF_TYPE; + } else if ((sort == Sort.DOUBLE) && decimal) { + return Definition.DOUBLE_TYPE; + } else if ((sort == Sort.FLOAT) && decimal) { + return Definition.FLOAT_TYPE; + } else if (sort == Sort.LONG) { + return Definition.LONG_TYPE; + } else if (sort == Sort.INT || sort == Sort.CHAR || sort == Sort.SHORT || sort == Sort.BYTE) { + return Definition.INT_TYPE; } return null; } - public static Type promoteNumeric(final Definition definition, - final Type from0, final Type from1, final boolean decimal, final boolean primitive) { + public static Type promoteNumeric(Type from0, Type from1, boolean decimal) { final Sort sort0 = from0.sort; final Sort sort1 = from1.sort; if (sort0 == Sort.DEF || sort1 == Sort.DEF) { - return definition.defType; + return Definition.DEF_TYPE; } if (decimal) { - if (sort0 == Sort.DOUBLE || sort0 == Sort.DOUBLE_OBJ || - sort1 == Sort.DOUBLE || sort1 == Sort.DOUBLE_OBJ) { - return primitive ? definition.doubleType : definition.doubleobjType; - } else if (sort0 == Sort.FLOAT || sort0 == Sort.FLOAT_OBJ || sort1 == Sort.FLOAT || sort1 == Sort.FLOAT_OBJ) { - return primitive ? definition.floatType : definition.floatobjType; + if (sort0 == Sort.DOUBLE || sort1 == Sort.DOUBLE) { + return Definition.DOUBLE_TYPE; + } else if (sort0 == Sort.FLOAT || sort1 == Sort.FLOAT) { + return Definition.FLOAT_TYPE; } } - if (sort0 == Sort.LONG || sort0 == Sort.LONG_OBJ || - sort1 == Sort.LONG || sort1 == Sort.LONG_OBJ) { - return primitive ? definition.longType : definition.longobjType; - } else if (sort0 == Sort.INT || sort0 == Sort.INT_OBJ || - sort1 == Sort.INT || sort1 == Sort.INT_OBJ || - sort0 == Sort.CHAR || sort0 == Sort.CHAR_OBJ || - sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ || - sort0 == Sort.SHORT || sort0 == Sort.SHORT_OBJ || - sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ || - sort0 == Sort.BYTE || sort0 == Sort.BYTE_OBJ || - sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { - return primitive ? 
definition.intType : definition.intobjType; + if (sort0 == Sort.LONG || sort1 == Sort.LONG) { + return Definition.LONG_TYPE; + } else if (sort0 == Sort.INT || sort1 == Sort.INT || + sort0 == Sort.CHAR || sort1 == Sort.CHAR || + sort0 == Sort.SHORT || sort1 == Sort.SHORT || + sort0 == Sort.BYTE || sort1 == Sort.BYTE) { + return Definition.INT_TYPE; } return null; } - public static Type promoteAdd(final Definition definition, final Type from0, final Type from1) { + public static Type promoteAdd(final Type from0, final Type from1) { final Sort sort0 = from0.sort; final Sort sort1 = from1.sort; if (sort0 == Sort.STRING || sort1 == Sort.STRING) { - return definition.stringType; + return Definition.STRING_TYPE; } - return promoteNumeric(definition, from0, from1, true, true); + return promoteNumeric(from0, from1, true); } - public static Type promoteXor(final Definition definition, final Type from0, final Type from1) { + public static Type promoteXor(final Type from0, final Type from1) { final Sort sort0 = from0.sort; final Sort sort1 = from1.sort; if (sort0.bool || sort1.bool) { - return definition.booleanType; + return Definition.BOOLEAN_TYPE; } - return promoteNumeric(definition, from0, from1, false, true); + return promoteNumeric(from0, from1, false); } - public static Type promoteEquality(final Definition definition, final Type from0, final Type from1) { + public static Type promoteEquality(final Type from0, final Type from1) { final Sort sort0 = from0.sort; final Sort sort1 = from1.sort; if (sort0 == Sort.DEF || sort1 == Sort.DEF) { - return definition.defType; - } - - final boolean primitive = sort0.primitive && sort1.primitive; - - if (sort0.bool && sort1.bool) { - return primitive ? definition.booleanType : definition.booleanobjType; - } - - if (sort0.numeric && sort1.numeric) { - return promoteNumeric(definition, from0, from1, true, primitive); - } - - return definition.objectType; - } - - public static Type promoteReference(final Definition definition, final Type from0, final Type from1) { - final Sort sort0 = from0.sort; - final Sort sort1 = from1.sort; - - if (sort0 == Sort.DEF || sort1 == Sort.DEF) { - return definition.defType; + return Definition.DEF_TYPE; } if (sort0.primitive && sort1.primitive) { if (sort0.bool && sort1.bool) { - return definition.booleanType; + return Definition.BOOLEAN_TYPE; } if (sort0.numeric && sort1.numeric) { - return promoteNumeric(definition, from0, from1, true, true); + return promoteNumeric(from0, from1, true); } } - return definition.objectType; + return Definition.OBJECT_TYPE; } - public static Type promoteConditional(final Definition definition, - final Type from0, final Type from1, final Object const0, final Object const1) { + public static Type promoteConditional(final Type from0, final Type from1, final Object const0, final Object const1) { if (from0.equals(from1)) { return from0; } @@ -245,126 +792,124 @@ public final class AnalyzerCaster { final Sort sort1 = from1.sort; if (sort0 == Sort.DEF || sort1 == Sort.DEF) { - return definition.defType; + return Definition.DEF_TYPE; } - final boolean primitive = sort0.primitive && sort1.primitive; + if (sort0.primitive && sort1.primitive) { + if (sort0.bool && sort1.bool) { + return Definition.BOOLEAN_TYPE; + } - if (sort0.bool && sort1.bool) { - return primitive ? definition.booleanType : definition.booleanobjType; - } - - if (sort0.numeric && sort1.numeric) { - if (sort0 == Sort.DOUBLE || sort0 == Sort.DOUBLE_OBJ || sort1 == Sort.DOUBLE || sort1 == Sort.DOUBLE_OBJ) { - return primitive ? 
definition.doubleType : definition.doubleobjType; - } else if (sort0 == Sort.FLOAT || sort0 == Sort.FLOAT_OBJ || sort1 == Sort.FLOAT || sort1 == Sort.FLOAT_OBJ) { - return primitive ? definition.floatType : definition.floatobjType; - } else if (sort0 == Sort.LONG || sort0 == Sort.LONG_OBJ || sort1 == Sort.LONG || sort1 == Sort.LONG_OBJ) { - return sort0.primitive && sort1.primitive ? definition.longType : definition.longobjType; + if (sort0 == Sort.DOUBLE || sort1 == Sort.DOUBLE) { + return Definition.DOUBLE_TYPE; + } else if (sort0 == Sort.FLOAT || sort1 == Sort.FLOAT) { + return Definition.FLOAT_TYPE; + } else if (sort0 == Sort.LONG || sort1 == Sort.LONG) { + return Definition.LONG_TYPE; } else { - if (sort0 == Sort.BYTE || sort0 == Sort.BYTE_OBJ) { - if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { - return primitive ? definition.byteType : definition.byteobjType; - } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) { + if (sort0 == Sort.BYTE) { + if (sort1 == Sort.BYTE) { + return Definition.BYTE_TYPE; + } else if (sort1 == Sort.SHORT) { if (const1 != null) { final short constant = (short)const1; if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { - return primitive ? definition.byteType : definition.byteobjType; + return Definition.BYTE_TYPE; } } - return primitive ? definition.shortType : definition.shortobjType; - } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) { - return primitive ? definition.intType : definition.intobjType; - } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) { + return Definition.SHORT_TYPE; + } else if (sort1 == Sort.CHAR) { + return Definition.INT_TYPE; + } else if (sort1 == Sort.INT) { if (const1 != null) { final int constant = (int)const1; if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { - return primitive ? definition.byteType : definition.byteobjType; + return Definition.BYTE_TYPE; } } - return primitive ? definition.intType : definition.intobjType; + return Definition.INT_TYPE; } - } else if (sort0 == Sort.SHORT || sort0 == Sort.SHORT_OBJ) { - if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { + } else if (sort0 == Sort.SHORT) { + if (sort1 == Sort.BYTE) { if (const0 != null) { final short constant = (short)const0; if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { - return primitive ? definition.byteType : definition.byteobjType; + return Definition.BYTE_TYPE; } } - return primitive ? definition.shortType : definition.shortobjType; - } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) { - return primitive ? definition.shortType : definition.shortobjType; - } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) { - return primitive ? definition.intType : definition.intobjType; - } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) { + return Definition.SHORT_TYPE; + } else if (sort1 == Sort.SHORT) { + return Definition.SHORT_TYPE; + } else if (sort1 == Sort.CHAR) { + return Definition.INT_TYPE; + } else if (sort1 == Sort.INT) { if (const1 != null) { final int constant = (int)const1; if (constant <= Short.MAX_VALUE && constant >= Short.MIN_VALUE) { - return primitive ? definition.shortType : definition.shortobjType; + return Definition.SHORT_TYPE; } } - return primitive ? definition.intType : definition.intobjType; + return Definition.INT_TYPE; } - } else if (sort0 == Sort.CHAR || sort0 == Sort.CHAR_OBJ) { - if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { - return primitive ? 
definition.intType : definition.intobjType; - } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) { - return primitive ? definition.intType : definition.intobjType; - } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) { - return primitive ? definition.charType : definition.charobjType; - } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) { + } else if (sort0 == Sort.CHAR) { + if (sort1 == Sort.BYTE) { + return Definition.INT_TYPE; + } else if (sort1 == Sort.SHORT) { + return Definition.INT_TYPE; + } else if (sort1 == Sort.CHAR) { + return Definition.CHAR_TYPE; + } else if (sort1 == Sort.INT) { if (const1 != null) { final int constant = (int)const1; if (constant <= Character.MAX_VALUE && constant >= Character.MIN_VALUE) { - return primitive ? definition.byteType : definition.byteobjType; + return Definition.BYTE_TYPE; } } - return primitive ? definition.intType : definition.intobjType; + return Definition.INT_TYPE; } - } else if (sort0 == Sort.INT || sort0 == Sort.INT_OBJ) { - if (sort1 == Sort.BYTE || sort1 == Sort.BYTE_OBJ) { + } else if (sort0 == Sort.INT) { + if (sort1 == Sort.BYTE) { if (const0 != null) { final int constant = (int)const0; if (constant <= Byte.MAX_VALUE && constant >= Byte.MIN_VALUE) { - return primitive ? definition.byteType : definition.byteobjType; + return Definition.BYTE_TYPE; } } - return primitive ? definition.intType : definition.intobjType; - } else if (sort1 == Sort.SHORT || sort1 == Sort.SHORT_OBJ) { + return Definition.INT_TYPE; + } else if (sort1 == Sort.SHORT) { if (const0 != null) { final int constant = (int)const0; if (constant <= Short.MAX_VALUE && constant >= Short.MIN_VALUE) { - return primitive ? definition.byteType : definition.byteobjType; + return Definition.BYTE_TYPE; } } - return primitive ? definition.intType : definition.intobjType; - } else if (sort1 == Sort.CHAR || sort1 == Sort.CHAR_OBJ) { + return Definition.INT_TYPE; + } else if (sort1 == Sort.CHAR) { if (const0 != null) { final int constant = (int)const0; if (constant <= Character.MAX_VALUE && constant >= Character.MIN_VALUE) { - return primitive ? definition.byteType : definition.byteobjType; + return Definition.BYTE_TYPE; } } - return primitive ? definition.intType : definition.intobjType; - } else if (sort1 == Sort.INT || sort1 == Sort.INT_OBJ) { - return primitive ? definition.intType : definition.intobjType; + return Definition.INT_TYPE; + } else if (sort1 == Sort.INT) { + return Definition.INT_TYPE; } } } @@ -374,7 +919,7 @@ public final class AnalyzerCaster { // to calculate the highest upper bound for the two types and return that. // However, for now we just return objectType that may require an extra cast. - return definition.objectType; + return Definition.OBJECT_TYPE; } private AnalyzerCaster() {} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java index c487dddba71..666f28bb406 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java @@ -57,7 +57,7 @@ final class Compiler { try { // Setup the code privileges. 
CODESOURCE = new CodeSource(new URL("file:" + BootstrapInfo.UNTRUSTED_CODEBASE), (Certificate[]) null); - } catch (final MalformedURLException impossible) { + } catch (MalformedURLException impossible) { throw new RuntimeException(impossible); } } @@ -69,7 +69,7 @@ final class Compiler { /** * @param parent The parent ClassLoader. */ - Loader(final ClassLoader parent) { + Loader(ClassLoader parent) { super(parent); } @@ -79,7 +79,7 @@ final class Compiler { * @param bytes The generated byte code. * @return A Class object extending {@link Executable}. */ - Class define(final String name, final byte[] bytes) { + Class define(String name, byte[] bytes) { return defineClass(name, bytes, 0, bytes.length, CODESOURCE).asSubclass(Executable.class); } } @@ -92,7 +92,7 @@ final class Compiler { * @param settings The CompilerSettings to be used during the compilation. * @return An {@link Executable} Painless script. */ - static Executable compile(final Loader loader, final String name, final String source, final CompilerSettings settings) { + static Executable compile(Loader loader, String name, String source, CompilerSettings settings) { byte[] bytes = compile(name, source, settings); return createExecutable(loader, name, source, bytes); @@ -111,11 +111,11 @@ final class Compiler { " plugin if a script longer than this length is a requirement."); } - final Reserved reserved = new Reserved(); - final SSource root = Walker.buildPainlessTree(source, reserved); - final Variables variables = Analyzer.analyze(settings, Definition.INSTANCE, reserved, root); + Reserved reserved = new Reserved(); + SSource root = Walker.buildPainlessTree(source, reserved, settings); + Variables variables = Analyzer.analyze(reserved, root); - return Writer.write(settings, Definition.INSTANCE, name, source, variables, root); + return Writer.write(settings, name, source, variables, root); } /** @@ -126,13 +126,13 @@ final class Compiler { * @param bytes The ASM generated byte code to define the class with. * @return A Painless {@link Executable} script. */ - private static Executable createExecutable(final Loader loader, final String name, final String source, final byte[] bytes) { + private static Executable createExecutable(Loader loader, String name, String source, byte[] bytes) { try { - final Class clazz = loader.define(CLASS_NAME, bytes); - final java.lang.reflect.Constructor constructor = clazz.getConstructor(String.class, String.class); + Class clazz = loader.define(CLASS_NAME, bytes); + java.lang.reflect.Constructor constructor = clazz.getConstructor(String.class, String.class); return constructor.newInstance(name, source); - } catch (final Exception exception) { // Catch everything to let the user know this is something caused internally. + } catch (Exception exception) { // Catch everything to let the user know this is something caused internally. 
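For orientation, the compile path after this change is parse, analyze, emit, define; schematically, wiring together exactly the calls shown in this Compiler.java hunk (glue sketch only, not new functionality):

    static Executable compileSketch(Loader loader, String name, String source, CompilerSettings settings) {
        Reserved reserved = new Reserved();                                   // reserved identifiers
        SSource root = Walker.buildPainlessTree(source, reserved, settings);  // parse to AST
        Variables variables = Analyzer.analyze(reserved, root);               // semantic analysis; no Definition argument anymore
        byte[] bytes = Writer.write(settings, name, source, variables, root); // ASM bytecode
        return createExecutable(loader, name, source, bytes);                 // defineClass + reflective constructor
    }
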
throw new IllegalStateException( "An internal error occurred attempting to define the script [" + name + "].", exception); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java index 9ec26bf345a..79b75b32b98 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java @@ -24,20 +24,15 @@ package org.elasticsearch.painless; */ public final class CompilerSettings { - /** - * Constant to be used when specifying numeric overflow when compiling a script. - */ - public static final String NUMERIC_OVERFLOW = "numeric_overflow"; - /** * Constant to be used when specifying the maximum loop counter when compiling a script. */ public static final String MAX_LOOP_COUNTER = "max_loop_counter"; /** - * Whether or not to allow numeric values to overflow without exception. + * Constant to be used for enabling additional internal compilation checks (slower). */ - private boolean numericOverflow = true; + public static final String PICKY = "picky"; /** * The maximum number of statements allowed to be run in a loop. @@ -45,25 +40,10 @@ public final class CompilerSettings { private int maxLoopCounter = 10000; /** - * Returns {@code true} if numeric operations should overflow, {@code false} - * if they should signal an exception. - *

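For context on the hunk below: the numeric_overflow setting is removed outright (overflow now always behaves like Java), while the new picky flag opts into strict parser self-checks. A hypothetical caller configuring a debug compile:

    CompilerSettings settings = new CompilerSettings();
    settings.setPicky(true);           // strict grammar/ambiguity checks; slower, debugging only
    settings.setMaxLoopCounter(10000); // unchanged: cumulative statement budget across all loops
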
-     * If this value is {@code true} (default), then things behave like java:
-     * overflow for integer types can result in unexpected values / unexpected
-     * signs, and overflow for floating point types can result in infinite or
-     * {@code NaN} values.
+     * Whether to throw an exception on ambiguity or other internal parsing issues. This option
+     * makes things slower too; it is only for debugging.
      */
-    public final boolean getNumericOverflow() {
-        return numericOverflow;
-    }
-
-    /**
-     * Set {@code true} for numerics to overflow, false to deliver exceptions.
-     * @see #getNumericOverflow
-     */
-    public final void setNumericOverflow(boolean allow) {
-        this.numericOverflow = allow;
-    }
+    private boolean picky = false;

     /**
      * Returns the value for the cumulative total number of statements that can be made in all loops
@@ -81,4 +61,21 @@ public final class CompilerSettings {
     public final void setMaxLoopCounter(int max) {
         this.maxLoopCounter = max;
     }
+
+    /**
+     * Returns true if the compiler should be picky. This means it runs slower and enables additional
+     * runtime checks, throwing an exception if there are ambiguities in the grammar or other low level
+     * parsing problems.
+     */
+    public boolean isPicky() {
+        return picky;
+    }
+
+    /**
+     * Set to true if compilation should be picky.
+     * @see #isPicky
+     */
+    public void setPicky(boolean picky) {
+        this.picky = picky;
+    }
 }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java
index 9226fc3f098..bd8e09f504b 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java
@@ -52,9 +52,10 @@ public final class Def {
     // TODO: Once Java has a factory for those in java.lang.invoke.MethodHandles, use it:

     /** Helper class for isolating MethodHandles and methods to get the length of arrays
-     * (to emulate a "arraystore" byteoode using MethodHandles).
+     * (to emulate an "arraystore" bytecode using MethodHandles).
      * This should really be a method in {@link MethodHandles} class!
      */
+    @SuppressWarnings("unused") // getArrayLength() methods are actually used, javac just does not know :)
     private static final class ArrayLengthHelper {
         private static final Lookup PRIV_LOOKUP = MethodHandles.lookup();

@@ -134,17 +135,16 @@ public final class Def {
      * @param receiverClass Class of the object to invoke the method on.
      * @param name Name of the method.
      * @param type Callsite signature. Need not match exactly, except the number of parameters.
-     * @param definition Whitelist to check.
      * @return pointer to matching method to invoke. never returns null.
      * @throws IllegalArgumentException if no matching whitelisted method was found.
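lookupMethod, lookupGetter and lookupSetter below share one resolution order: walk the receiver's superclass chain and, at each level, scan its directly implemented interfaces, consulting the now-static whitelist at every step. A condensed sketch of that shared walk (hypothetical helper; the methods/getters/setters maps follow the RuntimeClass shape in this patch):

    // Sketch only: 'pick' selects from a RuntimeClass, e.g. rc -> rc.getters.get(name).
    static MethodHandle resolve(Class<?> receiver,
                                java.util.function.Function<RuntimeClass, MethodHandle> pick) {
        for (Class<?> clazz = receiver; clazz != null; clazz = clazz.getSuperclass()) {
            RuntimeClass struct = Definition.getRuntimeClass(clazz);
            if (struct != null) {
                MethodHandle handle = pick.apply(struct);
                if (handle != null) {
                    return handle;
                }
            }
            for (Class<?> iface : clazz.getInterfaces()) {
                struct = Definition.getRuntimeClass(iface);
                if (struct != null) {
                    MethodHandle handle = pick.apply(struct);
                    if (handle != null) {
                        return handle;
                    }
                }
            }
        }
        throw new IllegalArgumentException("no whitelisted member found for " + receiver);
    }
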
*/ - static MethodHandle lookupMethod(Class receiverClass, String name, MethodType type, Definition definition) { + static MethodHandle lookupMethod(Class receiverClass, String name, MethodType type) { // we don't consider receiver an argument/counting towards arity type = type.dropParameterTypes(0, 1); Definition.MethodKey key = new Definition.MethodKey(name, type.parameterCount()); // check whitelist for matching method for (Class clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) { - RuntimeClass struct = definition.runtimeMap.get(clazz); + RuntimeClass struct = Definition.getRuntimeClass(clazz); if (struct != null) { Method method = struct.methods.get(key); @@ -154,7 +154,7 @@ public final class Def { } for (final Class iface : clazz.getInterfaces()) { - struct = definition.runtimeMap.get(iface); + struct = Definition.getRuntimeClass(iface); if (struct != null) { Method method = struct.methods.get(key); @@ -192,14 +192,13 @@ public final class Def { *

* @param receiverClass Class of the object to retrieve the field from. * @param name Name of the field. - * @param definition Whitelist to check. * @return pointer to matching field. never returns null. * @throws IllegalArgumentException if no matching whitelisted field was found. */ - static MethodHandle lookupGetter(Class receiverClass, String name, Definition definition) { + static MethodHandle lookupGetter(Class receiverClass, String name) { // first try whitelist for (Class clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) { - RuntimeClass struct = definition.runtimeMap.get(clazz); + RuntimeClass struct = Definition.getRuntimeClass(clazz); if (struct != null) { MethodHandle handle = struct.getters.get(name); @@ -209,7 +208,7 @@ public final class Def { } for (final Class iface : clazz.getInterfaces()) { - struct = definition.runtimeMap.get(iface); + struct = Definition.getRuntimeClass(iface); if (struct != null) { MethodHandle handle = struct.getters.get(name); @@ -263,14 +262,13 @@ public final class Def { *

* @param receiverClass Class of the object to retrieve the field from. * @param name Name of the field. - * @param definition Whitelist to check. * @return pointer to matching field. never returns null. * @throws IllegalArgumentException if no matching whitelisted field was found. */ - static MethodHandle lookupSetter(Class receiverClass, String name, Definition definition) { + static MethodHandle lookupSetter(Class receiverClass, String name) { // first try whitelist for (Class clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) { - RuntimeClass struct = definition.runtimeMap.get(clazz); + RuntimeClass struct = Definition.getRuntimeClass(clazz); if (struct != null) { MethodHandle handle = struct.setters.get(name); @@ -280,7 +278,7 @@ public final class Def { } for (final Class iface : clazz.getInterfaces()) { - struct = definition.runtimeMap.get(iface); + struct = Definition.getRuntimeClass(iface); if (struct != null) { MethodHandle handle = struct.setters.get(name); @@ -971,6 +969,10 @@ public final class Def { // Conversion methods for Def to primitive types. + public static boolean DefToboolean(final Object value) { + return (boolean)value; + } + public static byte DefTobyteImplicit(final Object value) { return (byte)value; } @@ -1051,79 +1053,6 @@ public final class Def { } } - public static Byte DefToByteImplicit(final Object value) { - return (Byte)value; - } - - public static Short DefToShortImplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Byte) { - return ((Byte)value).shortValue(); - } else { - return (Short)value; - } - } - - public static Character DefToCharacterImplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Byte) { - return (char)(byte)value; - } else { - return (Character)value; - } - } - - public static Integer DefToIntegerImplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Byte || value instanceof Short) { - return ((Number)value).intValue(); - } else if (value instanceof Character) { - return (int)(char)value; - } else { - return (Integer)value; - } - } - - public static Long DefToLongImplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Byte || value instanceof Short || value instanceof Integer) { - return ((Number)value).longValue(); - } else if (value instanceof Character) { - return (long)(char)value; - } else { - return (Long)value; - } - } - - public static Float DefToFloatImplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Byte || value instanceof Short || value instanceof Integer || value instanceof Long) { - return ((Number)value).floatValue(); - } else if (value instanceof Character) { - return (float)(char)value; - } else { - return (Float)value; - } - } - - public static Double DefToDoubleImplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Byte || value instanceof Short || - value instanceof Integer || value instanceof Long || value instanceof Float) { - return ((Number)value).doubleValue(); - } else if (value instanceof Character) { - return (double)(char)value; - } else { - return (Double)value; - } - } - public static byte DefTobyteExplicit(final Object value) { if (value instanceof Character) { return (byte)(char)value; @@ -1179,74 +1108,4 @@ public final class Def { return ((Number)value).doubleValue(); } } - - public static Byte DefToByteExplicit(final 
Object value) { - if (value == null) { - return null; - } else if (value instanceof Character) { - return (byte)(char)value; - } else { - return ((Number)value).byteValue(); - } - } - - public static Short DefToShortExplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Character) { - return (short)(char)value; - } else { - return ((Number)value).shortValue(); - } - } - - public static Character DefToCharacterExplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Character) { - return ((Character)value); - } else { - return (char)((Number)value).intValue(); - } - } - - public static Integer DefToIntegerExplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Character) { - return (int)(char)value; - } else { - return ((Number)value).intValue(); - } - } - - public static Long DefToLongExplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Character) { - return (long)(char)value; - } else { - return ((Number)value).longValue(); - } - } - - public static Float DefToFloatExplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Character) { - return (float)(char)value; - } else { - return ((Number)value).floatValue(); - } - } - - public static Double DefToDoubleExplicit(final Object value) { - if (value == null) { - return null; - } else if (value instanceof Character) { - return (double)(char)value; - } else { - return ((Number)value).doubleValue(); - } - } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java index 380f5455ab3..40b9cc6cbe8 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java @@ -94,11 +94,11 @@ public final class DefBootstrap { private static MethodHandle lookup(int flavor, Class clazz, String name, MethodType type) { switch(flavor) { case METHOD_CALL: - return Def.lookupMethod(clazz, name, type, Definition.INSTANCE); + return Def.lookupMethod(clazz, name, type); case LOAD: - return Def.lookupGetter(clazz, name, Definition.INSTANCE); + return Def.lookupGetter(clazz, name); case STORE: - return Def.lookupSetter(clazz, name, Definition.INSTANCE); + return Def.lookupSetter(clazz, name); case ARRAY_LOAD: return Def.lookupArrayLoad(clazz); case ARRAY_STORE: diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java index f44db5cb504..ac8e7b89b79 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java @@ -19,34 +19,58 @@ package org.elasticsearch.painless; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.index.fielddata.ScriptDocValues; - +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.LineNumberReader; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; -import java.util.ArrayList; +import java.nio.charset.StandardCharsets; import java.util.Arrays; -import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; import java.util.List; 
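With the boxed DefToByte/DefToInteger/... helpers deleted above, only the primitive conversions remain; the boxed paths are presumably covered by the new box/unbox flags on Cast that appear later in this diff. The retained semantics, using only methods visible in these hunks:

    Object d = Character.valueOf('A');          // a def value holding a char
    byte b = Def.DefTobyteExplicit(d);          // explicit cast: char -> byte, yields (byte) 65
    boolean t = Def.DefToboolean(Boolean.TRUE); // newly added: plain unbox of a def boolean
    // Def.DefTobyteImplicit(d) would throw ClassCastException: char does not widen to byte implicitly
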
import java.util.Map;
import java.util.Objects;
-import java.util.Set;

/**
 * The entire API for Painless. Also used as a whitelist for checking for legal
 * methods and fields at both compile-time and runtime.
 */
public final class Definition {
+
+    private static final List<String> DEFINITION_FILES = Collections.unmodifiableList(
+        Arrays.asList("org.elasticsearch.txt",
+                      "java.lang.txt",
+                      "java.math.txt",
+                      "java.text.txt",
+                      "java.util.txt",
+                      "java.util.function.txt",
+                      "java.util.stream.txt"));

-    /**
-     * The default language API to be used with Painless. The second construction is used
-     * to finalize all the variables, so there is no mistake of modification afterwards.
-     */
-    static Definition INSTANCE = new Definition(new Definition());
+    private static final Definition INSTANCE = new Definition();
+
+    /** Some native types as constants: */
+    public static final Type VOID_TYPE = getType("void");
+    public static final Type BOOLEAN_TYPE = getType("boolean");
+    public static final Type BOOLEAN_OBJ_TYPE = getType("Boolean");
+    public static final Type BYTE_TYPE = getType("byte");
+    public static final Type BYTE_OBJ_TYPE = getType("Byte");
+    public static final Type SHORT_TYPE = getType("short");
+    public static final Type SHORT_OBJ_TYPE = getType("Short");
+    public static final Type INT_TYPE = getType("int");
+    public static final Type INT_OBJ_TYPE = getType("Integer");
+    public static final Type LONG_TYPE = getType("long");
+    public static final Type LONG_OBJ_TYPE = getType("Long");
+    public static final Type FLOAT_TYPE = getType("float");
+    public static final Type FLOAT_OBJ_TYPE = getType("Float");
+    public static final Type DOUBLE_TYPE = getType("double");
+    public static final Type DOUBLE_OBJ_TYPE = getType("Double");
+    public static final Type CHAR_TYPE = getType("char");
+    public static final Type CHAR_OBJ_TYPE = getType("Character");
+    public static final Type OBJECT_TYPE = getType("Object");
+    public static final Type DEF_TYPE = getType("def");
+    public static final Type STRING_TYPE = getType("String");
+    public static final Type EXCEPTION_TYPE = getType("Exception");

     public enum Sort {
         VOID( void.class , 0 , true , false , false , false ),
@@ -141,15 +165,13 @@ public final class Definition {
         public final Struct owner;
         public final List<Type> arguments;
         public final org.objectweb.asm.commons.Method method;
-        public final java.lang.reflect.Constructor reflect;

-        private Constructor(final String name, final Struct owner, final List<Type> arguments,
-                            final org.objectweb.asm.commons.Method method, final java.lang.reflect.Constructor reflect) {
+        private Constructor(String name, Struct owner, List<Type> arguments,
+                            org.objectweb.asm.commons.Method method) {
             this.name = name;
             this.owner = owner;
             this.arguments = Collections.unmodifiableList(arguments);
             this.method = method;
-            this.reflect = reflect;
         }
     }

@@ -159,18 +181,17 @@ public final class Definition {
         public final Type rtn;
         public final List<Type> arguments;
         public final org.objectweb.asm.commons.Method method;
-        public final java.lang.reflect.Method reflect;
+        public final int modifiers;
         public final MethodHandle handle;

-        private Method(final String name, final Struct owner, final Type rtn, final List<Type> arguments,
-                       final org.objectweb.asm.commons.Method method, final java.lang.reflect.Method reflect,
-                       final MethodHandle handle) {
+        private Method(String name, Struct owner, Type rtn, List<Type> arguments,
+                       org.objectweb.asm.commons.Method method, int modifiers, MethodHandle handle) {
             this.name = name;
             this.owner = owner;
             this.rtn = rtn;
             this.arguments =
Collections.unmodifiableList(arguments); this.method = method; - this.reflect = reflect; + this.modifiers = modifiers; this.handle = handle; } } @@ -178,19 +199,18 @@ public final class Definition { public static final class Field { public final String name; public final Struct owner; - public final Type generic; public final Type type; - public final java.lang.reflect.Field reflect; - public final MethodHandle getter; - public final MethodHandle setter; + public final String javaName; + public final int modifiers; + private final MethodHandle getter; + private final MethodHandle setter; - private Field(final String name, final Struct owner, final Type generic, final Type type, - final java.lang.reflect.Field reflect, final MethodHandle getter, final MethodHandle setter) { + private Field(String name, String javaName, Struct owner, Type type, int modifiers, MethodHandle getter, MethodHandle setter) { this.name = name; + this.javaName = javaName; this.owner = owner; - this.generic = generic; this.type = type; - this.reflect = reflect; + this.modifiers = modifiers; this.getter = getter; this.setter = setter; } @@ -289,6 +309,10 @@ public final class Definition { members = Collections.unmodifiableMap(struct.members); } + private Struct freeze() { + return new Struct(this); + } + @Override public boolean equals(Object object) { if (this == object) { @@ -314,50 +338,32 @@ public final class Definition { public final Type from; public final Type to; public final boolean explicit; + public final boolean unboxFrom; + public final boolean unboxTo; + public final boolean boxFrom; + public final boolean boxTo; public Cast(final Type from, final Type to, final boolean explicit) { this.from = from; this.to = to; this.explicit = explicit; + this.unboxFrom = false; + this.unboxTo = false; + this.boxFrom = false; + this.boxTo = false; } - @Override - public boolean equals(final Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - final Cast cast = (Cast)object; - - return from.equals(cast.from) && to.equals(cast.to) && explicit == cast.explicit; + public Cast(final Type from, final Type to, final boolean explicit, + final boolean unboxFrom, final boolean unboxTo, final boolean boxFrom, final boolean boxTo) { + this.from = from; + this.to = to; + this.explicit = explicit; + this.unboxFrom = unboxFrom; + this.unboxTo = unboxTo; + this.boxFrom = boxFrom; + this.boxTo = boxTo; } - @Override - public int hashCode() { - int result = from.hashCode(); - result = 31 * result + to.hashCode(); - result = 31 * result + (explicit ? 
1 : 0); - - return result; - } - } - - public static final class Transform extends Cast { - public final Method method; - public final Type upcast; - public final Type downcast; - - public Transform(final Cast cast, Method method, final Type upcast, final Type downcast) { - super(cast.from, cast.to, cast.explicit); - - this.method = method; - this.upcast = upcast; - this.downcast = downcast; - } } public static final class RuntimeClass { @@ -367,1228 +373,174 @@ public final class Definition { private RuntimeClass(final Map methods, final Map getters, final Map setters) { - this.methods = methods; - this.getters = getters; - this.setters = setters; + this.methods = Collections.unmodifiableMap(methods); + this.getters = Collections.unmodifiableMap(getters); + this.setters = Collections.unmodifiableMap(setters); } } - public final Map structsMap; - public final Map transformsMap; - public final Map, RuntimeClass> runtimeMap; + /** Returns whether or not a non-array type exists. */ + public static boolean isSimpleType(final String name) { + return INSTANCE.structsMap.containsKey(name); + } - public final Type voidType; - public final Type booleanType; - public final Type byteType; - public final Type shortType; - public final Type charType; - public final Type intType; - public final Type longType; - public final Type floatType; - public final Type doubleType; + /** Returns whether or not a type exists without an exception. */ + public static boolean isType(final String name) { + try { + INSTANCE.getTypeInternal(name); - public final Type voidobjType; - public final Type booleanobjType; - public final Type byteobjType; - public final Type shortobjType; - public final Type charobjType; - public final Type intobjType; - public final Type longobjType; - public final Type floatobjType; - public final Type doubleobjType; + return true; + } catch (IllegalArgumentException exception) { + return false; + } + } - public final Type objectType; - public final Type defType; - public final Type numberType; - public final Type charseqType; - public final Type stringType; - public final Type mathType; - public final Type utilityType; - public final Type defobjType; + /** Gets the type given by its name */ + public static Type getType(final String name) { + return INSTANCE.getTypeInternal(name); + } - public final Type itrType; - public final Type oitrType; - public final Type sitrType; + /** Creates an array type from the given Struct. 
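With the registry now static, call sites stop threading a Definition instance through and use the constants and lookups directly; a usage sketch with names from this diff:

    Definition.Type i = Definition.INT_TYPE;              // predefined constant
    Definition.Type s = Definition.getType("String");     // lookup by whitelist name
    boolean known = Definition.isSimpleType("ArrayList"); // non-array existence check
    Definition.RuntimeClass rc = Definition.getRuntimeClass(java.util.ArrayList.class);
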
*/ + public static Type getType(final Struct struct, final int dimensions) { + return INSTANCE.getTypeInternal(struct, dimensions); + } - public final Type collectionType; - public final Type ocollectionType; - public final Type scollectionType; + public static RuntimeClass getRuntimeClass(Class clazz) { + return INSTANCE.runtimeMap.get(clazz); + } - public final Type listType; - public final Type arraylistType; - public final Type olistType; - public final Type oarraylistType; - public final Type slistType; - public final Type sarraylistType; + // INTERNAL IMPLEMENTATION: - public final Type setType; - public final Type hashsetType; - public final Type osetType; - public final Type ohashsetType; - public final Type ssetType; - public final Type shashsetType; - - public final Type mapType; - public final Type hashmapType; - public final Type oomapType; - public final Type oohashmapType; - public final Type smapType; - public final Type shashmapType; - public final Type somapType; - public final Type sohashmapType; - - public final Type execType; - - public final Type exceptionType; - public final Type arithexcepType; - public final Type iargexcepType; - public final Type istateexceptType; - public final Type nfexcepType; - - // docvalues accessors - public final Type geoPointType; - public final Type stringsType; - // TODO: add ReadableDateTime? or don't expose the joda stuff? - public final Type longsType; - public final Type doublesType; - public final Type geoPointsType; - - // for testing features not currently "used" by the whitelist (we should not rush the API for that!) - public final Type featureTestType; + private final Map, RuntimeClass> runtimeMap; + private final Map structsMap; + private final Map simpleTypesMap; private Definition() { structsMap = new HashMap<>(); - transformsMap = new HashMap<>(); + simpleTypesMap = new HashMap<>(); runtimeMap = new HashMap<>(); - addStructs(); - - voidType = getType("void"); - booleanType = getType("boolean"); - byteType = getType("byte"); - shortType = getType("short"); - charType = getType("char"); - intType = getType("int"); - longType = getType("long"); - floatType = getType("float"); - doubleType = getType("double"); - - voidobjType = getType("Void"); - booleanobjType = getType("Boolean"); - byteobjType = getType("Byte"); - shortobjType = getType("Short"); - charobjType = getType("Character"); - intobjType = getType("Integer"); - longobjType = getType("Long"); - floatobjType = getType("Float"); - doubleobjType = getType("Double"); - - objectType = getType("Object"); - defType = getType("def"); - numberType = getType("Number"); - charseqType = getType("CharSequence"); - stringType = getType("String"); - mathType = getType("Math"); - utilityType = getType("Utility"); - defobjType = getType("Def"); - - itrType = getType("Iterator"); - oitrType = getType("Iterator"); - sitrType = getType("Iterator"); - - collectionType = getType("Collection"); - ocollectionType = getType("Collection"); - scollectionType = getType("Collection"); - - listType = getType("List"); - arraylistType = getType("ArrayList"); - olistType = getType("List"); - oarraylistType = getType("ArrayList"); - slistType = getType("List"); - sarraylistType = getType("ArrayList"); - - setType = getType("Set"); - hashsetType = getType("HashSet"); - osetType = getType("Set"); - ohashsetType = getType("HashSet"); - ssetType = getType("Set"); - shashsetType = getType("HashSet"); - - mapType = getType("Map"); - hashmapType = getType("HashMap"); - oomapType = getType("Map"); - 
oohashmapType = getType("HashMap");
-        smapType = getType("Map");
-        shashmapType = getType("HashMap");
-        somapType = getType("Map");
-        sohashmapType = getType("HashMap");
-
-        execType = getType("Executable");
-
-        exceptionType = getType("Exception");
-        arithexcepType = getType("ArithmeticException");
-        iargexcepType = getType("IllegalArgumentException");
-        istateexceptType = getType("IllegalStateException");
-        nfexcepType = getType("NumberFormatException");
-
-        geoPointType = getType("GeoPoint");
-        stringsType = getType("Strings");
-        longsType = getType("Longs");
-        doublesType = getType("Doubles");
-        geoPointsType = getType("GeoPoints");
-
-        featureTestType = getType("FeatureTest");
-
+        // parse the classes and return hierarchy (map of class name -> superclasses/interfaces)
+        Map<String, List<String>> hierarchy = addStructs();
+        // add every method for each class
         addElements();
-        copyStructs();
-        addTransforms();
-        addRuntimeClasses();
-    }
-
-    private Definition(final Definition definition) {
-        final Map<String, Struct> structs = new HashMap<>();
-
-        for (final Struct struct : definition.structsMap.values()) {
-            structs.put(struct.name, new Struct(struct));
+        // apply hierarchy: this means e.g. copying Object's methods into String (that's how subclasses work)
+        for (Map.Entry<String, List<String>> clazz : hierarchy.entrySet()) {
+            copyStruct(clazz.getKey(), clazz.getValue());
+        }
+        // precompute runtime classes
+        for (Struct struct : structsMap.values()) {
+            addRuntimeClass(struct);
+        }
+        // copy all structs to make them unmodifiable for outside users:
+        for (final Map.Entry<String, Struct> entry : structsMap.entrySet()) {
+            entry.setValue(entry.getValue().freeze());
+        }
         }
-
-        this.structsMap = Collections.unmodifiableMap(structs);
-        this.transformsMap = Collections.unmodifiableMap(definition.transformsMap);
-        this.runtimeMap = Collections.unmodifiableMap(definition.runtimeMap);
-
-        this.voidType = definition.voidType;
-        this.booleanType = definition.booleanType;
-        this.byteType = definition.byteType;
-        this.shortType = definition.shortType;
-        this.charType = definition.charType;
-        this.intType = definition.intType;
-        this.longType = definition.longType;
-        this.floatType = definition.floatType;
-        this.doubleType = definition.doubleType;
-
-        this.voidobjType = definition.voidobjType;
-        this.booleanobjType = definition.booleanobjType;
-        this.byteobjType = definition.byteobjType;
-        this.shortobjType = definition.shortobjType;
-        this.charobjType = definition.charobjType;
-        this.intobjType = definition.intobjType;
-        this.longobjType = definition.longobjType;
-        this.floatobjType = definition.floatobjType;
-        this.doubleobjType = definition.doubleobjType;
-
-        this.objectType = definition.objectType;
-        this.defType = definition.defType;
-        this.numberType = definition.numberType;
-        this.charseqType = definition.charseqType;
-        this.stringType = definition.stringType;
-        this.mathType = definition.mathType;
-        this.utilityType = definition.utilityType;
-        this.defobjType = definition.defobjType;
-
-        this.itrType = definition.itrType;
-        this.oitrType = definition.oitrType;
-        this.sitrType = definition.sitrType;
-
-        this.collectionType = definition.collectionType;
-        this.ocollectionType = definition.ocollectionType;
-        this.scollectionType = definition.scollectionType;
-
-        this.listType = definition.listType;
-        this.arraylistType = definition.arraylistType;
-        this.olistType = definition.olistType;
-        this.oarraylistType = definition.oarraylistType;
-        this.slistType = definition.slistType;
-        this.sarraylistType = definition.sarraylistType;
-
-        this.setType = definition.setType;
-
this.hashsetType = definition.hashsetType; - this.osetType = definition.osetType; - this.ohashsetType = definition.ohashsetType; - this.ssetType = definition.ssetType; - this.shashsetType = definition.shashsetType; - - this.mapType = definition.mapType; - this.hashmapType = definition.hashmapType; - this.oomapType = definition.oomapType; - this.oohashmapType = definition.oohashmapType; - this.smapType = definition.smapType; - this.shashmapType = definition.shashmapType; - this.somapType = definition.somapType; - this.sohashmapType = definition.sohashmapType; - - this.execType = definition.execType; - - this.exceptionType = definition.exceptionType; - this.arithexcepType = definition.arithexcepType; - this.iargexcepType = definition.iargexcepType; - this.istateexceptType = definition.istateexceptType; - this.nfexcepType = definition.nfexcepType; - - this.geoPointType = definition.geoPointType; - this.stringsType = definition.stringsType; - this.longsType = definition.longsType; - this.doublesType = definition.doublesType; - this.geoPointsType = definition.geoPointsType; - - this.featureTestType = definition.featureTestType; } - private void addStructs() { - addStruct( "void" , void.class ); - addStruct( "boolean" , boolean.class ); - addStruct( "byte" , byte.class ); - addStruct( "short" , short.class ); - addStruct( "char" , char.class ); - addStruct( "int" , int.class ); - addStruct( "long" , long.class ); - addStruct( "float" , float.class ); - addStruct( "double" , double.class ); - - addStruct( "Void" , Void.class ); - addStruct( "Boolean" , Boolean.class ); - addStruct( "Byte" , Byte.class ); - addStruct( "Short" , Short.class ); - addStruct( "Character" , Character.class ); - addStruct( "Integer" , Integer.class ); - addStruct( "Long" , Long.class ); - addStruct( "Float" , Float.class ); - addStruct( "Double" , Double.class ); - - addStruct( "Object" , Object.class ); - addStruct( "def" , Object.class ); - addStruct( "Number" , Number.class ); - addStruct( "CharSequence" , CharSequence.class ); - addStruct( "String" , String.class ); - addStruct( "Math" , Math.class ); - addStruct( "Utility" , Utility.class ); - addStruct( "Def" , Def.class ); - - addStruct( "Iterator" , Iterator.class ); - addStruct( "Iterator" , Iterator.class ); - addStruct( "Iterator" , Iterator.class ); - - addStruct( "Collection" , Collection.class ); - addStruct( "Collection" , Collection.class ); - addStruct( "Collection" , Collection.class ); - - addStruct( "List" , List.class ); - addStruct( "ArrayList" , ArrayList.class ); - addStruct( "List" , List.class ); - addStruct( "ArrayList" , ArrayList.class ); - addStruct( "List" , List.class ); - addStruct( "ArrayList" , ArrayList.class ); - - addStruct( "Set" , Set.class ); - addStruct( "HashSet" , HashSet.class ); - addStruct( "Set" , Set.class ); - addStruct( "HashSet" , HashSet.class ); - addStruct( "Set" , Set.class ); - addStruct( "HashSet" , HashSet.class ); - - addStruct( "Map" , Map.class ); - addStruct( "HashMap" , HashMap.class ); - addStruct( "Map" , Map.class ); - addStruct( "HashMap" , HashMap.class ); - addStruct( "Map" , Map.class ); - addStruct( "HashMap" , HashMap.class ); - addStruct( "Map" , Map.class ); - addStruct( "HashMap" , HashMap.class ); - - addStruct( "Executable" , Executable.class ); - - addStruct( "Exception" , Exception.class); - addStruct( "ArithmeticException" , ArithmeticException.class); - addStruct( "IllegalArgumentException" , IllegalArgumentException.class); - addStruct( "IllegalStateException" , 
IllegalStateException.class);
-        addStruct( "NumberFormatException" , NumberFormatException.class);
-
-        addStruct( "GeoPoint" , GeoPoint.class);
-        addStruct( "Strings" , ScriptDocValues.Strings.class);
-        addStruct( "Longs" , ScriptDocValues.Longs.class);
-        addStruct( "Doubles" , ScriptDocValues.Doubles.class);
-        addStruct( "GeoPoints" , ScriptDocValues.GeoPoints.class);
-
-        addStruct( "FeatureTest", FeatureTest.class);
+    /** adds classes from definition. returns hierarchy */
+    private Map<String, List<String>> addStructs() {
+        final Map<String, List<String>> hierarchy = new HashMap<>();
+        for (String file : DEFINITION_FILES) {
+            int currentLine = -1;
+            try {
+                try (InputStream stream = Definition.class.getResourceAsStream(file);
+                     LineNumberReader reader = new LineNumberReader(new InputStreamReader(stream, StandardCharsets.UTF_8))) {
+                    String line = null;
+                    while ((line = reader.readLine()) != null) {
+                        currentLine = reader.getLineNumber();
+                        line = line.trim();
+                        if (line.length() == 0 || line.charAt(0) == '#') {
+                            continue;
+                        }
+                        if (line.startsWith("class ")) {
+                            String elements[] = line.split("\u0020");
+                            assert elements[2].equals("->");
+                            if (elements.length == 7) {
+                                hierarchy.put(elements[1], Arrays.asList(elements[5].split(",")));
+                            } else {
+                                assert elements.length == 5;
+                            }
+                            String className = elements[1];
+                            String javaPeer = elements[3];
+                            final Class<?> javaClazz;
+                            switch (javaPeer) {
+                                case "void":
+                                    javaClazz = void.class;
+                                    break;
+                                case "boolean":
+                                    javaClazz = boolean.class;
+                                    break;
+                                case "byte":
+                                    javaClazz = byte.class;
+                                    break;
+                                case "short":
+                                    javaClazz = short.class;
+                                    break;
+                                case "char":
+                                    javaClazz = char.class;
+                                    break;
+                                case "int":
+                                    javaClazz = int.class;
+                                    break;
+                                case "long":
+                                    javaClazz = long.class;
+                                    break;
+                                case "float":
+                                    javaClazz = float.class;
+                                    break;
+                                case "double":
+                                    javaClazz = double.class;
+                                    break;
+                                default:
+                                    javaClazz = Class.forName(javaPeer);
+                                    break;
+                            }
+                            addStruct(className, javaClazz);
+                        }
+                    }
+                }
+            } catch (Exception e) {
+                throw new RuntimeException("syntax error in " + file + ", line: " + currentLine, e);
+            }
+        }
+        return hierarchy;
     }

+    /** adds class methods/fields/ctors */
     private void addElements() {
-        addMethod("Object", "equals", null, false, booleanType, new Type[] {objectType}, null, null);
-        addMethod("Object", "hashCode", null, false, intType, new Type[] {}, null, null);
-        addMethod("Object", "toString", null, false, stringType, new Type[] {}, null, null);
-
-        addMethod("def", "equals", null, false, booleanType, new Type[] {objectType}, null, null);
-        addMethod("def", "hashCode", null, false, intType, new Type[] {}, null, null);
-        addMethod("def", "toString", null, false, stringType, new Type[] {}, null, null);
-
-        addConstructor("Boolean", "new", new Type[] {booleanType}, null);
-        addMethod("Boolean", "booleanValue", null, false, booleanType, new Type[] {}, null, null);
-        addMethod("Boolean", "compare", null, true, intType, new Type[] {booleanType,booleanType}, null, null);
-        addMethod("Boolean", "compareTo", null, false, intType, new Type[] {booleanobjType}, null, null);
-        addMethod("Boolean", "parseBoolean", null, true, booleanType, new Type[] {stringType}, null, null);
-        addMethod("Boolean", "valueOf", null, true, booleanobjType, new Type[] {booleanType}, null, null);
-        addField("Boolean", "FALSE", null, true, booleanobjType, null);
-        addField("Boolean", "TRUE", null, true, booleanobjType, null);
-
-        addConstructor("Byte", "new", new Type[] {byteType}, null);
-        addMethod("Byte", "compare", null, true, intType, new Type[] {byteType,byteType}, null, null);
-
addMethod("Byte", "compareTo", null, false, intType, new Type[] {byteobjType}, null, null); - addMethod("Byte", "parseByte", null, true, byteType, new Type[] {stringType}, null, null); - addMethod("Byte", "valueOf", null, true, byteobjType, new Type[] {byteType}, null, null); - addField("Byte", "MIN_VALUE", null, true, byteType, null); - addField("Byte", "MAX_VALUE", null, true, byteType, null); - - addConstructor("Short", "new", new Type[] {shortType}, null); - addMethod("Short", "compare", null, true, intType, new Type[] {shortType,shortType}, null, null); - addMethod("Short", "compareTo", null, false, intType, new Type[] {shortobjType}, null, null); - addMethod("Short", "parseShort", null, true, shortType, new Type[] {stringType}, null, null); - addMethod("Short", "valueOf", null, true, shortobjType, new Type[] {shortType}, null, null); - addField("Short", "MIN_VALUE", null, true, shortType, null); - addField("Short", "MAX_VALUE", null, true, shortType, null); - - addConstructor("Character", "new", new Type[] {charType}, null); - addMethod("Character", "charCount", null, true, intType, new Type[] {intType}, null, null); - addMethod("Character", "charValue", null, false, charType, new Type[] {}, null, null); - addMethod("Character", "compare", null, true, intType, new Type[] {charType,charType}, null, null); - addMethod("Character", "compareTo", null, false, intType, new Type[] {charobjType}, null, null); - addMethod("Character", "digit", null, true, intType, new Type[] {intType,intType}, null, null); - addMethod("Character", "forDigit", null, true, charType, new Type[] {intType,intType}, null, null); - addMethod("Character", "getName", null, true, stringType, new Type[] {intType}, null, null); - addMethod("Character", "getNumericValue", null, true, intType, new Type[] {intType}, null, null); - addMethod("Character", "isAlphabetic", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isDefined", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isDigit", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isIdeographic", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isLetter", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isLetterOrDigit", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isLowerCase", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isMirrored", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isSpaceChar", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isTitleCase", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isUpperCase", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "isWhitespace", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Character", "valueOf", null, true, charobjType, new Type[] {charType}, null, null); - addField("Character", "MIN_VALUE", null, true, charType, null); - addField("Character", "MAX_VALUE", null, true, charType, null); - - addConstructor("Integer", "new", new Type[] {intType}, null); - addMethod("Integer", "compare", null, true, intType, new Type[] {intType,intType}, null, null); - addMethod("Integer", "compareTo", null, false, intType, new Type[] {intobjType}, null, null); - addMethod("Integer", "min", null, true, 
intType, new Type[] {intType,intType}, null, null); - addMethod("Integer", "max", null, true, intType, new Type[] {intType,intType}, null, null); - addMethod("Integer", "parseInt", null, true, intType, new Type[] {stringType}, null, null); - addMethod("Integer", "signum", null, true, intType, new Type[] {intType}, null, null); - addMethod("Integer", "toHexString", null, true, stringType, new Type[] {intType}, null, null); - addMethod("Integer", "valueOf", null, true, intobjType, new Type[] {intType}, null, null); - addField("Integer", "MIN_VALUE", null, true, intType, null); - addField("Integer", "MAX_VALUE", null, true, intType, null); - - addConstructor("Long", "new", new Type[] {longType}, null); - addMethod("Long", "compare", null, true, intType, new Type[] {longType,longType}, null, null); - addMethod("Long", "compareTo", null, false, intType, new Type[] {longobjType}, null, null); - addMethod("Long", "min", null, true, longType, new Type[] {longType,longType}, null, null); - addMethod("Long", "max", null, true, longType, new Type[] {longType,longType}, null, null); - addMethod("Long", "parseLong", null, true, longType, new Type[] {stringType}, null, null); - addMethod("Long", "signum", null, true, intType, new Type[] {longType}, null, null); - addMethod("Long", "toHexString", null, true, stringType, new Type[] {longType}, null, null); - addMethod("Long", "valueOf", null, true, longobjType, new Type[] {longType}, null, null); - addField("Long", "MIN_VALUE", null, true, longType, null); - addField("Long", "MAX_VALUE", null, true, longType, null); - - addConstructor("Float", "new", new Type[] {floatType}, null); - addMethod("Float", "compare", null, true, intType, new Type[] {floatType,floatType}, null, null); - addMethod("Float", "compareTo", null, false, intType, new Type[] {floatobjType}, null, null); - addMethod("Float", "min", null, true, floatType, new Type[] {floatType,floatType}, null, null); - addMethod("Float", "max", null, true, floatType, new Type[] {floatType,floatType}, null, null); - addMethod("Float", "parseFloat", null, true, floatType, new Type[] {stringType}, null, null); - addMethod("Float", "toHexString", null, true, stringType, new Type[] {floatType}, null, null); - addMethod("Float", "valueOf", null, true, floatobjType, new Type[] {floatType}, null, null); - addField("Float", "MIN_VALUE", null, true, floatType, null); - addField("Float", "MAX_VALUE", null, true, floatType, null); - - addConstructor("Double", "new", new Type[] {doubleType}, null); - addMethod("Double", "compare", null, true, intType, new Type[] {doubleType,doubleType}, null, null); - addMethod("Double", "compareTo", null, false, intType, new Type[] {doubleobjType}, null, null); - addMethod("Double", "min", null, true, doubleType, new Type[] {doubleType,doubleType}, null, null); - addMethod("Double", "max", null, true, doubleType, new Type[] {doubleType,doubleType}, null, null); - addMethod("Double", "parseDouble", null, true, doubleType, new Type[] {stringType}, null, null); - addMethod("Double", "toHexString", null, true, stringType, new Type[] {doubleType}, null, null); - addMethod("Double", "valueOf", null, true, doubleobjType, new Type[] {doubleType}, null, null); - addField("Double", "MIN_VALUE", null, true, doubleType, null); - addField("Double", "MAX_VALUE", null, true, doubleType, null); - - addMethod("Number", "byteValue", null, false, byteType, new Type[] {}, null, null); - addMethod("Number", "shortValue", null, false, shortType, new Type[] {}, null, null); - addMethod("Number", 
"intValue", null, false, intType, new Type[] {}, null, null); - addMethod("Number", "longValue", null, false, longType, new Type[] {}, null, null); - addMethod("Number", "floatValue", null, false, floatType, new Type[] {}, null, null); - addMethod("Number", "doubleValue", null, false, doubleType, new Type[] {}, null, null); - - addMethod("CharSequence", "charAt", null, false, charType, new Type[] {intType}, null, null); - addMethod("CharSequence", "length", null, false, intType, new Type[] {}, null, null); - - addConstructor("String", "new", new Type[] {}, null); - addMethod("String", "codePointAt", null, false, intType, new Type[] {intType}, null, null); - addMethod("String", "compareTo", null, false, intType, new Type[] {stringType}, null, null); - addMethod("String", "concat", null, false, stringType, new Type[] {stringType}, null, null); - addMethod("String", "endsWith", null, false, booleanType, new Type[] {stringType}, null, null); - addMethod("String", "indexOf", null, false, intType, new Type[] {stringType}, null, null); - addMethod("String", "indexOf", null, false, intType, new Type[] {stringType, intType}, null, null); - addMethod("String", "isEmpty", null, false, booleanType, new Type[] {}, null, null); - addMethod("String", "replace", null, false, stringType, new Type[] {charseqType, charseqType}, null, null); - addMethod("String", "startsWith", null, false, booleanType, new Type[] {stringType}, null, null); - addMethod("String", "substring", null, false, stringType, new Type[] {intType, intType}, null, null); - addMethod("String", "toCharArray", null, false, getType(charType.struct, 1), new Type[] {}, null, null); - addMethod("String", "trim", null, false, stringType, new Type[] {}, null, null); - - addMethod("Utility", "NumberToboolean", null, true, booleanType, new Type[] {numberType}, null, null); - addMethod("Utility", "NumberTochar", null, true, charType, new Type[] {numberType}, null, null); - addMethod("Utility", "NumberToBoolean", null, true, booleanobjType, new Type[] {numberType}, null, null); - addMethod("Utility", "NumberToByte", null, true, byteobjType, new Type[] {numberType}, null, null); - addMethod("Utility", "NumberToShort", null, true, shortobjType, new Type[] {numberType}, null, null); - addMethod("Utility", "NumberToCharacter", null, true, charobjType, new Type[] {numberType}, null, null); - addMethod("Utility", "NumberToInteger", null, true, intobjType, new Type[] {numberType}, null, null); - addMethod("Utility", "NumberToLong", null, true, longobjType, new Type[] {numberType}, null, null); - addMethod("Utility", "NumberToFloat", null, true, floatobjType, new Type[] {numberType}, null, null); - addMethod("Utility", "NumberToDouble", null, true, doubleobjType, new Type[] {numberType}, null, null); - addMethod("Utility", "booleanTobyte", null, true, byteType, new Type[] {booleanType}, null, null); - addMethod("Utility", "booleanToshort", null, true, shortType, new Type[] {booleanType}, null, null); - addMethod("Utility", "booleanTochar", null, true, charType, new Type[] {booleanType}, null, null); - addMethod("Utility", "booleanToint", null, true, intType, new Type[] {booleanType}, null, null); - addMethod("Utility", "booleanTolong", null, true, longType, new Type[] {booleanType}, null, null); - addMethod("Utility", "booleanTofloat", null, true, floatType, new Type[] {booleanType}, null, null); - addMethod("Utility", "booleanTodouble", null, true, doubleType, new Type[] {booleanType}, null, null); - addMethod("Utility", "booleanToInteger", null, true, 
intobjType, new Type[] {booleanType}, null, null); - addMethod("Utility", "BooleanTobyte", null, true, byteType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanToshort", null, true, shortType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanTochar", null, true, charType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanToint", null, true, intType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanTolong", null, true, longType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanTofloat", null, true, floatType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanTodouble", null, true, doubleType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanToByte", null, true, byteobjType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanToShort", null, true, shortobjType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanToCharacter", null, true, charobjType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanToInteger", null, true, intobjType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanToLong", null, true, longobjType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanToFloat", null, true, floatobjType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "BooleanToDouble", null, true, doubleobjType, new Type[] {booleanobjType}, null, null); - addMethod("Utility", "byteToboolean", null, true, booleanType, new Type[] {byteType}, null, null); - addMethod("Utility", "byteToShort", null, true, shortobjType, new Type[] {byteType}, null, null); - addMethod("Utility", "byteToCharacter", null, true, charobjType, new Type[] {byteType}, null, null); - addMethod("Utility", "byteToInteger", null, true, intobjType, new Type[] {byteType}, null, null); - addMethod("Utility", "byteToLong", null, true, longobjType, new Type[] {byteType}, null, null); - addMethod("Utility", "byteToFloat", null, true, floatobjType, new Type[] {byteType}, null, null); - addMethod("Utility", "byteToDouble", null, true, doubleobjType, new Type[] {byteType}, null, null); - addMethod("Utility", "ByteToboolean", null, true, booleanType, new Type[] {byteobjType}, null, null); - addMethod("Utility", "ByteTochar", null, true, charType, new Type[] {byteobjType}, null, null); - addMethod("Utility", "shortToboolean", null, true, booleanType, new Type[] {shortType}, null, null); - addMethod("Utility", "shortToByte", null, true, byteobjType, new Type[] {shortType}, null, null); - addMethod("Utility", "shortToCharacter", null, true, charobjType, new Type[] {shortType}, null, null); - addMethod("Utility", "shortToInteger", null, true, intobjType, new Type[] {shortType}, null, null); - addMethod("Utility", "shortToLong", null, true, longobjType, new Type[] {shortType}, null, null); - addMethod("Utility", "shortToFloat", null, true, floatobjType, new Type[] {shortType}, null, null); - addMethod("Utility", "shortToDouble", null, true, doubleobjType, new Type[] {shortType}, null, null); - addMethod("Utility", "ShortToboolean", null, true, booleanType, new Type[] {shortobjType}, null, null); - addMethod("Utility", "ShortTochar", null, true, charType, new Type[] {shortobjType}, null, null); - addMethod("Utility", "charToboolean", null, true, booleanType, new Type[] {charType}, null, null); - addMethod("Utility", "charToByte", null, true, byteobjType, new Type[] 
{charType}, null, null); - addMethod("Utility", "charToShort", null, true, shortobjType, new Type[] {charType}, null, null); - addMethod("Utility", "charToInteger", null, true, intobjType, new Type[] {charType}, null, null); - addMethod("Utility", "charToLong", null, true, longobjType, new Type[] {charType}, null, null); - addMethod("Utility", "charToFloat", null, true, floatobjType, new Type[] {charType}, null, null); - addMethod("Utility", "charToDouble", null, true, doubleobjType, new Type[] {charType}, null, null); - addMethod("Utility", "charToString", null, true, stringType, new Type[] {charType}, null, null); - addMethod("Utility", "CharacterToboolean", null, true, booleanType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterTobyte", null, true, byteType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterToshort", null, true, shortType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterToint", null, true, intType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterTolong", null, true, longType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterTofloat", null, true, floatType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterTodouble", null, true, doubleType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterToBoolean", null, true, booleanobjType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterToByte", null, true, byteobjType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterToShort", null, true, shortobjType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterToInteger", null, true, intobjType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterToLong", null, true, longobjType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterToFloat", null, true, floatobjType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterToDouble", null, true, doubleobjType, new Type[] {charobjType}, null, null); - addMethod("Utility", "CharacterToString", null, true, stringType, new Type[] {charobjType}, null, null); - addMethod("Utility", "intToboolean", null, true, booleanType, new Type[] {intType}, null, null); - addMethod("Utility", "intToByte", null, true, byteobjType, new Type[] {intType}, null, null); - addMethod("Utility", "intToShort", null, true, shortobjType, new Type[] {intType}, null, null); - addMethod("Utility", "intToCharacter", null, true, charobjType, new Type[] {intType}, null, null); - addMethod("Utility", "intToLong", null, true, longobjType, new Type[] {intType}, null, null); - addMethod("Utility", "intToFloat", null, true, floatobjType, new Type[] {intType}, null, null); - addMethod("Utility", "intToDouble", null, true, doubleobjType, new Type[] {intType}, null, null); - addMethod("Utility", "IntegerToboolean", null, true, booleanType, new Type[] {intobjType}, null, null); - addMethod("Utility", "IntegerTochar", null, true, charType, new Type[] {intobjType}, null, null); - addMethod("Utility", "longToboolean", null, true, booleanType, new Type[] {longType}, null, null); - addMethod("Utility", "longToByte", null, true, byteobjType, new Type[] {longType}, null, null); - addMethod("Utility", "longToShort", null, true, shortobjType, new Type[] {longType}, null, null); - addMethod("Utility", "longToCharacter", null, true, charobjType, new Type[] {longType}, null, null); - addMethod("Utility", 
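(Aside: each of the removed registrations above points at a static bridge method on the old Utility struct, covering conversions the JDK does not provide directly, such as Number to Character. A minimal sketch of the shape such helpers presumably had, following the naming convention in the registrations; the real Utility class in the Painless source may differ in details:

--------------------------------------------------
// sketch only; assumed from the registrations above
public final class Utility {
    // Number has no charValue(), so a bridge is needed for the
    // Number -> Character transform registered above
    public static Character NumberToCharacter(final Number value) {
        return (char)value.intValue();
    }

    // boolean carries no numeric value in Java; a convention such as
    // true -> 1, false -> 0 must be picked explicitly
    public static int booleanToint(final boolean value) {
        return value ? 1 : 0;
    }

    private Utility() {}
}
--------------------------------------------------
)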
"longToInteger", null, true, intobjType, new Type[] {longType}, null, null); - addMethod("Utility", "longToFloat", null, true, floatobjType, new Type[] {longType}, null, null); - addMethod("Utility", "longToDouble", null, true, doubleobjType, new Type[] {longType}, null, null); - addMethod("Utility", "LongToboolean", null, true, booleanType, new Type[] {longobjType}, null, null); - addMethod("Utility", "LongTochar", null, true, charType, new Type[] {longobjType}, null, null); - addMethod("Utility", "floatToboolean", null, true, booleanType, new Type[] {floatType}, null, null); - addMethod("Utility", "floatToByte", null, true, byteobjType, new Type[] {floatType}, null, null); - addMethod("Utility", "floatToShort", null, true, shortobjType, new Type[] {floatType}, null, null); - addMethod("Utility", "floatToCharacter", null, true, charobjType, new Type[] {floatType}, null, null); - addMethod("Utility", "floatToInteger", null, true, intobjType, new Type[] {floatType}, null, null); - addMethod("Utility", "floatToLong", null, true, longobjType, new Type[] {floatType}, null, null); - addMethod("Utility", "floatToDouble", null, true, doubleobjType, new Type[] {floatType}, null, null); - addMethod("Utility", "FloatToboolean", null, true, booleanType, new Type[] {floatobjType}, null, null); - addMethod("Utility", "FloatTochar", null, true, charType, new Type[] {floatobjType}, null, null); - addMethod("Utility", "doubleToboolean", null, true, booleanType, new Type[] {doubleType}, null, null); - addMethod("Utility", "doubleToByte", null, true, byteobjType, new Type[] {doubleType}, null, null); - addMethod("Utility", "doubleToShort", null, true, shortobjType, new Type[] {doubleType}, null, null); - addMethod("Utility", "doubleToCharacter", null, true, charobjType, new Type[] {doubleType}, null, null); - addMethod("Utility", "doubleToInteger", null, true, intobjType, new Type[] {doubleType}, null, null); - addMethod("Utility", "doubleToLong", null, true, longobjType, new Type[] {doubleType}, null, null); - addMethod("Utility", "doubleToFloat", null, true, floatobjType, new Type[] {doubleType}, null, null); - addMethod("Utility", "DoubleToboolean", null, true, booleanType, new Type[] {doubleobjType}, null, null); - addMethod("Utility", "DoubleTochar", null, true, charType, new Type[] {doubleobjType}, null, null); - addMethod("Utility", "StringTochar", null, true, charType, new Type[] {stringType}, null, null); - addMethod("Utility", "StringToCharacter", null, true, charobjType, new Type[] {stringType}, null, null); - - addMethod("Math", "abs", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "acos", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "asin", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "atan", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "atan2", null, true, doubleType, new Type[] {doubleType, doubleType}, null, null); - addMethod("Math", "cbrt", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "ceil", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "cos", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "cosh", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "exp", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "expm1", null, true, doubleType, new Type[] {doubleType}, null, null); - 
addMethod("Math", "floor", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "hypot", null, true, doubleType, new Type[] {doubleType, doubleType}, null, null); - addMethod("Math", "log", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "log10", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "log1p", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "max", null, true, doubleType, new Type[] {doubleType, doubleType}, null, null); - addMethod("Math", "min", null, true, doubleType, new Type[] {doubleType, doubleType}, null, null); - addMethod("Math", "pow", null, true, doubleType, new Type[] {doubleType, doubleType}, null, null); - addMethod("Math", "random", null, true, doubleType, new Type[] {}, null, null); - addMethod("Math", "rint", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "round", null, true, longType, new Type[] {doubleType}, null, null); - addMethod("Math", "sin", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "sinh", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "sqrt", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "tan", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "tanh", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "toDegrees", null, true, doubleType, new Type[] {doubleType}, null, null); - addMethod("Math", "toRadians", null, true, doubleType, new Type[] {doubleType}, null, null); - addField("Math", "E", null, true, doubleType, null); - addField("Math", "PI", null, true, doubleType, null); - - addMethod("Def", "DefTobyteImplicit", null, true, byteType, new Type[] {defType}, null, null); - addMethod("Def", "DefToshortImplicit", null, true, shortType, new Type[] {defType}, null, null); - addMethod("Def", "DefTocharImplicit", null, true, charType, new Type[] {defType}, null, null); - addMethod("Def", "DefTointImplicit", null, true, intType, new Type[] {defType}, null, null); - addMethod("Def", "DefTolongImplicit", null, true, longType, new Type[] {defType}, null, null); - addMethod("Def", "DefTofloatImplicit", null, true, floatType, new Type[] {defType}, null, null); - addMethod("Def", "DefTodoubleImplicit", null, true, doubleType, new Type[] {defType}, null, null); - addMethod("Def", "DefToByteImplicit", null, true, byteobjType, new Type[] {defType}, null, null); - addMethod("Def", "DefToShortImplicit", null, true, shortobjType, new Type[] {defType}, null, null); - addMethod("Def", "DefToCharacterImplicit", null, true, charobjType, new Type[] {defType}, null, null); - addMethod("Def", "DefToIntegerImplicit", null, true, intobjType, new Type[] {defType}, null, null); - addMethod("Def", "DefToLongImplicit", null, true, longobjType, new Type[] {defType}, null, null); - addMethod("Def", "DefToFloatImplicit", null, true, floatobjType, new Type[] {defType}, null, null); - addMethod("Def", "DefToDoubleImplicit", null, true, doubleobjType, new Type[] {defType}, null, null); - addMethod("Def", "DefTobyteExplicit", null, true, byteType, new Type[] {defType}, null, null); - addMethod("Def", "DefToshortExplicit", null, true, shortType, new Type[] {defType}, null, null); - addMethod("Def", "DefTocharExplicit", null, true, charType, new Type[] {defType}, null, null); - addMethod("Def", "DefTointExplicit", null, true, intType, new Type[] {defType}, 
-
-        addMethod("Iterator", "hasNext", null, false, booleanType, new Type[] {}, null, null);
-        addMethod("Iterator", "next", null, false, objectType, new Type[] {}, defType, null);
-        addMethod("Iterator", "remove", null, false, voidType, new Type[] {}, null, null);
-
-        addMethod("Iterator", "hasNext", null, false, booleanType, new Type[] {}, null, null);
-        addMethod("Iterator", "next", null, false, objectType, new Type[] {}, null, null);
-        addMethod("Iterator", "remove", null, false, voidType, new Type[] {}, null, null);
-
-        addMethod("Iterator", "hasNext", null, false, booleanType, new Type[] {}, null, null);
-        addMethod("Iterator", "next", null, false, objectType, new Type[] {}, stringType, null);
-        addMethod("Iterator", "remove", null, false, voidType, new Type[] {}, null, null);
-
-        addMethod("Collection", "add", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType});
-        addMethod("Collection", "clear", null, false, voidType, new Type[] {}, null, null);
-        addMethod("Collection", "contains", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType});
-        addMethod("Collection", "isEmpty", null, false, booleanType, new Type[] {}, null, null);
-        addMethod("Collection", "iterator", null, false, itrType, new Type[] {}, null, null);
-        addMethod("Collection", "remove", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType});
-        addMethod("Collection", "size", null, false, intType, new Type[] {}, null, null);
-
-        addMethod("Collection", "add", null, false, booleanType, new Type[] {objectType}, null, null);
-        addMethod("Collection", "clear", null, false, voidType, new Type[] {}, null, null);
-        addMethod("Collection", "contains", null, false, booleanType, new Type[] {objectType}, null, null);
-        addMethod("Collection", "isEmpty", null, false, booleanType, new Type[] {}, null, null);
-        addMethod("Collection", "iterator", null, false, oitrType, new Type[] {}, null, null);
-        addMethod("Collection", "remove", null, false, booleanType, new Type[] {objectType}, null, null);
-        addMethod("Collection", "size", null, false, intType, new Type[] {}, null, null);
-
-        addMethod("Collection", "add", null, false, booleanType, new Type[] {objectType}, null, new Type[] {stringType});
-        addMethod("Collection", "clear", null, false, voidType, new Type[] {}, null, null);
-        addMethod("Collection", "contains", null, false, booleanType, new Type[] {objectType}, null, new Type[] {stringType});
-        addMethod("Collection", "isEmpty", null, false, booleanType, new Type[] {}, null, null);
-        addMethod("Collection", "iterator", null, false, sitrType, new Type[] {}, null, null);
-        addMethod("Collection", "remove", null, false, booleanType, new Type[] {objectType}, null, new Type[] {stringType});
-        addMethod("Collection", "size", null, false, intType, new Type[] {}, null, null);
-
-        addMethod("List", "set", null, false, objectType, new Type[] {intType, objectType}, defType, new Type[] {intType, defType});
-        addMethod("List", "get", null, false, objectType, new Type[] {intType}, defType, null);
-        addMethod("List", "remove", null, false, objectType, new Type[] {intType}, defType, null);
-        addMethod("List", "getLength", "size", false, intType, new Type[] {}, null, null);
-
-        addConstructor("ArrayList", "new", new Type[] {}, null);
-
-        addMethod("List", "set", null, false, objectType, new Type[] {intType, objectType}, null, null);
-        addMethod("List", "get", null, false, objectType, new Type[] {intType}, null, null);
-        addMethod("List", "remove", null, false, objectType, new Type[] {intType}, null, null);
-        addMethod("List", "getLength", "size", false, intType, new Type[] {}, null, null);
-
-        addConstructor("ArrayList", "new", new Type[] {}, null);
-
-        addMethod("List", "set", null, false, objectType, new Type[] {intType, objectType}, stringType,
-            new Type[] {intType, stringType});
-        addMethod("List", "get", null, false, objectType, new Type[] {intType}, stringType, null);
-        addMethod("List", "remove", null, false, objectType, new Type[] {intType}, stringType, null);
-        addMethod("List", "getLength", "size", false, intType, new Type[] {}, null, null);
-
-        addConstructor("ArrayList", "new", new Type[] {}, null);
-
-        addConstructor("HashSet", "new", new Type[] {}, null);
-
-        addConstructor("HashSet", "new", new Type[] {}, null);
-
-        addConstructor("HashSet", "new", new Type[] {}, null);
-
-        addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, defType, new Type[] {defType, defType});
-        addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, defType, new Type[] {defType});
-        addMethod("Map", "remove", null, false, objectType, new Type[] {objectType}, null, null);
-        addMethod("Map", "isEmpty", null, false, booleanType, new Type[] {}, null, null);
-        addMethod("Map", "size", null, false, intType, new Type[] {}, null, null);
-        addMethod("Map", "containsKey", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType});
-        addMethod("Map", "containsValue", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType});
-        addMethod("Map", "keySet", null, false, osetType, new Type[] {}, setType, null);
-        addMethod("Map", "values", null, false, ocollectionType, new Type[] {}, collectionType, null);
-
-        addConstructor("HashMap", "new", new Type[] {}, null);
-
-        addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, null, null);
-        addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, null, null);
-        addMethod("Map", "remove", null, false, objectType, new Type[] {objectType}, null, null);
-        addMethod("Map", "isEmpty", null, false, booleanType, new Type[] {}, null, null);
-        addMethod("Map", "size", null, false, intType, new Type[] {}, null, null);
-        addMethod("Map", "containsKey", null, false, booleanType, new Type[] {objectType}, null, null);
-        addMethod("Map", "containsValue", null, false, booleanType, new Type[] {objectType}, null, null);
-        addMethod("Map", "keySet", null, false, osetType, new Type[] {}, null, null);
-        addMethod("Map", "values", null, false, ocollectionType, new Type[] {}, null, null);
-
-        addConstructor("HashMap", "new", new Type[] {}, null);
-
-        addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, defType,
-            new Type[] {stringType, defType});
-        addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, defType, new Type[] {stringType});
-        addMethod("Map", "remove", null, false, objectType, new Type[] {objectType}, defType, new Type[] {stringType});
-        addMethod("Map", "isEmpty", null, false, booleanType, new Type[] {}, null, null);
-        addMethod("Map", "size", null, false, intType, new Type[] {}, null, null);
-        addMethod("Map", "containsKey", null, false, booleanType, new Type[] {objectType}, null, new Type[] {stringType});
-        addMethod("Map", "containsValue", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType});
-        addMethod("Map", "keySet", null, false, osetType, new Type[] {}, ssetType, null);
-        addMethod("Map", "values", null, false, ocollectionType, new Type[] {}, collectionType, null);
-
-        addConstructor("HashMap", "new", new Type[] {}, null);
-
-        addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, null,
-            new Type[] {stringType, objectType});
-        addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, null, new Type[] {stringType});
-        addMethod("Map", "remove", null, false, objectType, new Type[] {objectType}, null, new Type[] {stringType});
-        addMethod("Map", "isEmpty", null, false, booleanType, new Type[] {}, null, null);
-        addMethod("Map", "size", null, false, intType, new Type[] {}, null, null);
-        addMethod("Map", "containsKey", null, false, booleanType, new Type[] {objectType}, null, new Type[] {stringType});
-        addMethod("Map", "containsValue", null, false, booleanType, new Type[] {objectType}, null, null);
-        addMethod("Map", "keySet", null, false, osetType, new Type[] {}, ssetType, null);
-        addMethod("Map", "values", null, false, ocollectionType, new Type[] {}, null, null);
-
-        addConstructor("HashMap", "new", new Type[] {}, null);
-
-        addMethod("Exception", "getMessage", null, false, stringType, new Type[] {}, null, null);
-
-        addConstructor("ArithmeticException", "new", new Type[] {stringType}, null);
-
-        addConstructor("IllegalArgumentException", "new", new Type[] {stringType}, null);
-
-        addConstructor("IllegalStateException", "new", new Type[] {stringType}, null);
-
-        addConstructor("NumberFormatException", "new", new Type[] {stringType}, null);
-
-        addMethod("GeoPoint", "getLat", null, false, doubleType, new Type[] {}, null, null);
-        addMethod("GeoPoint", "getLon", null, false, doubleType, new Type[] {}, null, null);
-        addMethod("Strings", "getValue", null, false, stringType, new Type[] {}, null, null);
-        addMethod("Strings", "getValues", null, false, slistType, new Type[] {}, null, null);
-        addMethod("Longs", "getValue", null, false, longType, new Type[] {}, null, null);
-        addMethod("Longs", "getValues", null, false, olistType, new Type[] {}, null, null);
-        // TODO: add better date support for Longs here? (carefully?)
-        addMethod("Doubles", "getValue", null, false, doubleType, new Type[] {}, null, null);
-        addMethod("Doubles", "getValues", null, false, olistType, new Type[] {}, null, null);
-        addMethod("GeoPoints", "getValue", null, false, geoPointType, new Type[] {}, null, null);
-        addMethod("GeoPoints", "getValues", null, false, olistType, new Type[] {}, null, null);
-        addMethod("GeoPoints", "getLat", null, false, doubleType, new Type[] {}, null, null);
-        addMethod("GeoPoints", "getLon", null, false, doubleType, new Type[] {}, null, null);
-        addMethod("GeoPoints", "getLats", null, false, getType(doubleType.struct, 1), new Type[] {}, null, null);
-        addMethod("GeoPoints", "getLons", null, false, getType(doubleType.struct, 1), new Type[] {}, null, null);
-        // geo distance functions... so many...
-        addMethod("GeoPoints", "factorDistance", null, false, doubleType,
-            new Type[] { doubleType, doubleType }, null, null);
-        addMethod("GeoPoints", "factorDistanceWithDefault", null, false, doubleType,
-            new Type[] { doubleType, doubleType, doubleType }, null, null);
-        addMethod("GeoPoints", "factorDistance02", null, false, doubleType,
-            new Type[] { doubleType, doubleType }, null, null);
-        addMethod("GeoPoints", "factorDistance13", null, false, doubleType,
-            new Type[] { doubleType, doubleType }, null, null);
-        addMethod("GeoPoints", "arcDistance", null, false, doubleType,
-            new Type[] { doubleType, doubleType }, null, null);
-        addMethod("GeoPoints", "arcDistanceWithDefault", null, false, doubleType,
-            new Type[] { doubleType, doubleType, doubleType }, null, null);
-        addMethod("GeoPoints", "arcDistanceInKm", null, false, doubleType,
-            new Type[] { doubleType, doubleType }, null, null);
-        addMethod("GeoPoints", "arcDistanceInKmWithDefault", null, false, doubleType,
-            new Type[] { doubleType, doubleType, doubleType }, null, null);
-        addMethod("GeoPoints", "arcDistanceInMiles", null, false, doubleType,
-            new Type[] { doubleType, doubleType }, null, null);
-        addMethod("GeoPoints", "arcDistanceInMilesWithDefault", null, false, doubleType,
-            new Type[] { doubleType, doubleType, doubleType }, null, null);
-        addMethod("GeoPoints", "distance", null, false, doubleType,
-            new Type[] { doubleType, doubleType }, null, null);
-        addMethod("GeoPoints", "distanceWithDefault", null, false, doubleType,
-            new Type[] { doubleType, doubleType, doubleType }, null, null);
-        addMethod("GeoPoints", "distanceInKm", null, false, doubleType,
-            new Type[] { doubleType, doubleType }, null, null);
-        addMethod("GeoPoints", "distanceInKmWithDefault", null, false, doubleType,
-            new Type[] { doubleType, doubleType, doubleType }, null, null);
-        addMethod("GeoPoints", "distanceInMiles", null, false, doubleType,
-            new Type[] { doubleType, doubleType }, null, null);
-        addMethod("GeoPoints", "distanceInMilesWithDefault", null, false, doubleType,
-            new Type[] { doubleType, doubleType, doubleType }, null, null);
-        addMethod("GeoPoints", "geohashDistance", null, false, doubleType,
-            new Type[] { stringType }, null, null);
-        addMethod("GeoPoints", "geohashDistanceInKm", null, false, doubleType,
-            new Type[] { stringType }, null, null);
-        addMethod("GeoPoints", "geohashDistanceInMiles", null, false, doubleType,
-            new Type[] { stringType }, null, null);
-
-        // currently FeatureTest exposes overloaded constructor, field load store, and overloaded static methods
-        addConstructor("FeatureTest", "new", new Type[] {}, null);
-        addConstructor("FeatureTest", "new", new Type[] {intType, intType}, null);
-        addMethod("FeatureTest", "getX", null, false, intType, new Type[] {}, null, null);
-        addMethod("FeatureTest", "getY", null, false, intType, new Type[] {}, null, null);
-        addMethod("FeatureTest", "setX", null, false, voidType, new Type[] {intType}, null, null);
-        addMethod("FeatureTest", "setY", null, false, voidType, new Type[] {intType}, null, null);
-        addMethod("FeatureTest", "overloadedStatic", null, true, booleanType, new Type[] {}, null, null);
-        addMethod("FeatureTest", "overloadedStatic", null, true, booleanType, new Type[] {booleanType}, null, null);
-    }
-
-    private void copyStructs() {
-        copyStruct("Void", "Object");
-        copyStruct("Boolean", "Object");
-        copyStruct("Byte", "Number", "Object");
-        copyStruct("Short", "Number", "Object");
-        copyStruct("Character", "Object");
-        copyStruct("Integer", "Number", "Object");
-        copyStruct("Long", "Number", "Object");
-        copyStruct("Float", "Number", "Object");
-        copyStruct("Double", "Number", "Object");
-
-        copyStruct("Number", "Object");
-        copyStruct("CharSequence", "Object");
-        copyStruct("String", "CharSequence", "Object");
-
-        copyStruct("List", "Collection", "Object");
-        copyStruct("ArrayList", "List", "Collection", "Object");
-        copyStruct("List", "Collection", "Object");
-        copyStruct("ArrayList", "List", "Collection", "Object");
-        copyStruct("List", "Collection", "Object");
-        copyStruct("ArrayList", "List", "Collection", "Object");
-
-        copyStruct("Set", "Collection", "Object");
-        copyStruct("HashSet", "Set", "Collection", "Object");
-        copyStruct("Set", "Collection", "Object");
-        copyStruct("HashSet", "Set", "Collection", "Object");
-        copyStruct("Set", "Collection", "Object");
-        copyStruct("HashSet", "Set", "Collection", "Object");
-
-        copyStruct("Map", "Object");
-        copyStruct("HashMap", "Map", "Object");
-        copyStruct("Map", "Object");
-        copyStruct("HashMap", "Map", "Object");
-        copyStruct("Map", "Object");
-        copyStruct("HashMap", "Map", "Object");
-        copyStruct("Map", "Object");
-        copyStruct("HashMap", "Map", "Object");
-
-        copyStruct("Executable", "Object");
-
-        copyStruct("Exception", "Object");
-        copyStruct("ArithmeticException", "Exception", "Object");
-        copyStruct("IllegalArgumentException", "Exception", "Object");
-        copyStruct("IllegalStateException", "Exception", "Object");
-        copyStruct("NumberFormatException", "Exception", "Object");
-
-        copyStruct("GeoPoint", "Object");
-        copyStruct("Strings", "List", "Collection", "Object");
-        copyStruct("Longs", "List", "Collection", "Object");
-        copyStruct("Doubles", "List", "Collection", "Object");
-        copyStruct("GeoPoints", "List", "Collection", "Object");
-
-        copyStruct("FeatureTest", "Object");
-    }
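(Aside: each copyStruct call above flattens a super-struct's members down into the sub-struct. A rough sketch of that effect, assuming the Struct method/member maps used further down in this diff; overrides already present on the owner win:

--------------------------------------------------
// sketch only; Struct's map-based layout is assumed from the copy loops below
static void copyInto(Struct owner, Struct child) {
    // only copy what the owner does not already define, so overrides win,
    // and runtime lookups on the sub-struct stay a single map probe
    child.methods.forEach(owner.methods::putIfAbsent);
    child.members.forEach(owner.members::putIfAbsent);
}
--------------------------------------------------
)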
-
-    private void addTransforms() {
-        addTransform(booleanType, objectType, "Boolean", "valueOf", true, false);
-        addTransform(booleanType, defType, "Boolean", "valueOf", true, false);
-        addTransform(booleanType, booleanobjType, "Boolean", "valueOf", true, false);
-
-        addTransform(byteType, shortType, false);
-        addTransform(byteType, charType, true);
-        addTransform(byteType, intType, false);
-        addTransform(byteType, longType, false);
-        addTransform(byteType, floatType, false);
-        addTransform(byteType, doubleType, false);
-        addTransform(byteType, objectType, "Byte", "valueOf", true, false);
-        addTransform(byteType, defType, "Byte", "valueOf", true, false);
-        addTransform(byteType, numberType, "Byte", "valueOf", true, false);
-        addTransform(byteType, byteobjType, "Byte", "valueOf", true, false);
-        addTransform(byteType, shortobjType, "Utility", "byteToShort", true, false);
-        addTransform(byteType, charobjType, "Utility", "byteToCharacter", true, true);
-        addTransform(byteType, intobjType, "Utility", "byteToInteger", true, false);
-        addTransform(byteType, longobjType, "Utility", "byteToLong", true, false);
-        addTransform(byteType, floatobjType, "Utility", "byteToFloat", true, false);
-        addTransform(byteType, doubleobjType, "Utility", "byteToDouble", true, false);
-
-        addTransform(shortType, byteType, true);
-        addTransform(shortType, charType, true);
-        addTransform(shortType, intType, false);
-        addTransform(shortType, longType, false);
-        addTransform(shortType, floatType, false);
-        addTransform(shortType, doubleType, false);
-        addTransform(shortType, objectType, "Short", "valueOf", true, false);
-        addTransform(shortType, defType, "Short", "valueOf", true, false);
-        addTransform(shortType, numberType, "Short", "valueOf", true, false);
-        addTransform(shortType, byteobjType, "Utility", "shortToByte", true, true);
-        addTransform(shortType, shortobjType, "Short", "valueOf", true, false);
-        addTransform(shortType, charobjType, "Utility", "shortToCharacter", true, true);
-        addTransform(shortType, intobjType, "Utility", "shortToInteger", true, false);
-        addTransform(shortType, longobjType, "Utility", "shortToLong", true, false);
-        addTransform(shortType, floatobjType, "Utility", "shortToFloat", true, false);
-        addTransform(shortType, doubleobjType, "Utility", "shortToDouble", true, false);
-
-        addTransform(charType, byteType, true);
-        addTransform(charType, shortType, true);
-        addTransform(charType, intType, false);
-        addTransform(charType, longType, false);
-        addTransform(charType, floatType, false);
-        addTransform(charType, doubleType, false);
-        addTransform(charType, objectType, "Character", "valueOf", true, false);
-        addTransform(charType, defType, "Character", "valueOf", true, false);
-        addTransform(charType, numberType, "Utility", "charToInteger", true, false);
-        addTransform(charType, byteobjType, "Utility", "charToByte", true, true);
-        addTransform(charType, shortobjType, "Utility", "charToShort", true, true);
-        addTransform(charType, charobjType, "Character", "valueOf", true, false);
-        addTransform(charType, intobjType, "Utility", "charToInteger", true, false);
-        addTransform(charType, longobjType, "Utility", "charToLong", true, false);
-        addTransform(charType, floatobjType, "Utility", "charToFloat", true, false);
-        addTransform(charType, doubleobjType, "Utility", "charToDouble", true, false);
-        addTransform(charType, stringType, "Utility", "charToString", true, true);
-
-        addTransform(intType, byteType, true);
-        addTransform(intType, shortType, true);
-        addTransform(intType, charType, true);
-        addTransform(intType, longType, false);
-        addTransform(intType, floatType, false);
-        addTransform(intType, doubleType, false);
-        addTransform(intType, objectType, "Integer", "valueOf", true, false);
-        addTransform(intType, defType, "Integer", "valueOf", true, false);
-        addTransform(intType, numberType, "Integer", "valueOf", true, false);
-        addTransform(intType, byteobjType, "Utility", "intToByte", true, true);
-        addTransform(intType, shortobjType, "Utility", "intToShort", true, true);
-        addTransform(intType, charobjType, "Utility", "intToCharacter", true, true);
-        addTransform(intType, intobjType, "Integer", "valueOf", true, false);
-        addTransform(intType, longobjType, "Utility", "intToLong", true, false);
-        addTransform(intType, floatobjType, "Utility", "intToFloat", true, false);
-        addTransform(intType, doubleobjType, "Utility", "intToDouble", true, false);
-
-        addTransform(longType, byteType, true);
-        addTransform(longType, shortType, true);
-        addTransform(longType, charType, true);
-        addTransform(longType, intType, false);
-        addTransform(longType, floatType, false);
-        addTransform(longType, doubleType, false);
-        addTransform(longType, objectType, "Long", "valueOf", true, false);
-        addTransform(longType, defType, "Long", "valueOf", true, false);
-        addTransform(longType, numberType, "Long", "valueOf", true, false);
-        addTransform(longType, byteobjType, "Utility", "longToByte", true, true);
-        addTransform(longType, shortobjType, "Utility", "longToShort", true, true);
-        addTransform(longType, charobjType, "Utility", "longToCharacter", true, true);
-        addTransform(longType, intobjType, "Utility", "longToInteger", true, true);
-        addTransform(longType, longobjType, "Long", "valueOf", true, false);
-        addTransform(longType, floatobjType, "Utility", "longToFloat", true, false);
-        addTransform(longType, doubleobjType, "Utility", "longToDouble", true, false);
-
-        addTransform(floatType, byteType, true);
-        addTransform(floatType, shortType, true);
-        addTransform(floatType, charType, true);
-        addTransform(floatType, intType, true);
-        addTransform(floatType, longType, false);
-        addTransform(floatType, doubleType, false);
-        addTransform(floatType, objectType, "Float", "valueOf", true, false);
-        addTransform(floatType, defType, "Float", "valueOf", true, false);
-        addTransform(floatType, numberType, "Float", "valueOf", true, false);
-        addTransform(floatType, byteobjType, "Utility", "floatToByte", true, true);
-        addTransform(floatType, shortobjType, "Utility", "floatToShort", true, true);
-        addTransform(floatType, charobjType, "Utility", "floatToCharacter", true, true);
-        addTransform(floatType, intobjType, "Utility", "floatToInteger", true, true);
-        addTransform(floatType, longobjType, "Utility", "floatToLong", true, true);
-        addTransform(floatType, floatobjType, "Float", "valueOf", true, false);
-        addTransform(floatType, doubleobjType, "Utility", "floatToDouble", true, false);
-
-        addTransform(doubleType, byteType, true);
-        addTransform(doubleType, shortType, true);
-        addTransform(doubleType, charType, true);
-        addTransform(doubleType, intType, true);
-        addTransform(doubleType, longType, true);
-        addTransform(doubleType, floatType, false);
-        addTransform(doubleType, objectType, "Double", "valueOf", true, false);
-        addTransform(doubleType, defType, "Double", "valueOf", true, false);
-        addTransform(doubleType, numberType, "Double", "valueOf", true, false);
-        addTransform(doubleType, byteobjType, "Utility", "doubleToByte", true, true);
-        addTransform(doubleType, shortobjType, "Utility", "doubleToShort", true, true);
-        addTransform(doubleType, charobjType, "Utility", "doubleToCharacter", true, true);
-        addTransform(doubleType, intobjType, "Utility", "doubleToInteger", true, true);
-        addTransform(doubleType, longobjType, "Utility", "doubleToLong", true, true);
-        addTransform(doubleType, floatobjType, "Utility", "doubleToFloat", true, true);
-        addTransform(doubleType, doubleobjType, "Double", "valueOf", true, false);
-
-        addTransform(objectType, booleanType, "Boolean", "booleanValue", false, true);
-        addTransform(objectType, byteType, "Number", "byteValue", false, true);
-        addTransform(objectType, shortType, "Number", "shortValue", false, true);
-        addTransform(objectType, charType, "Character", "charValue", false, true);
-        addTransform(objectType, intType, "Number", "intValue", false, true);
-        addTransform(objectType, longType, "Number", "longValue", false, true);
-        addTransform(objectType, floatType, "Number", "floatValue", false, true);
-        addTransform(objectType, doubleType, "Number", "doubleValue", false, true);
-
-        addTransform(defType, booleanType, "Boolean", "booleanValue", false, false);
-        addTransform(defType, byteType, "Def", "DefTobyteImplicit", true, false);
-        addTransform(defType, shortType, "Def", "DefToshortImplicit", true, false);
-        addTransform(defType, charType, "Def", "DefTocharImplicit", true, false);
-        addTransform(defType, intType, "Def", "DefTointImplicit", true, false);
-        addTransform(defType, longType, "Def", "DefTolongImplicit", true, false);
-        addTransform(defType, floatType, "Def", "DefTofloatImplicit", true, false);
-        addTransform(defType, doubleType, "Def", "DefTodoubleImplicit", true, false);
-        addTransform(defType, byteobjType, "Def", "DefToByteImplicit", true, false);
-        addTransform(defType, shortobjType, "Def", "DefToShortImplicit", true, false);
-        addTransform(defType, charobjType, "Def", "DefToCharacterImplicit", true, false);
-        addTransform(defType, intobjType, "Def", "DefToIntegerImplicit", true, false);
-        addTransform(defType, longobjType, "Def", "DefToLongImplicit", true, false);
-        addTransform(defType, floatobjType, "Def", "DefToFloatImplicit", true, false);
-        addTransform(defType, doubleobjType, "Def", "DefToDoubleImplicit", true, false);
-        addTransform(defType, byteType, "Def", "DefTobyteExplicit", true, true);
-        addTransform(defType, shortType, "Def", "DefToshortExplicit", true, true);
-        addTransform(defType, charType, "Def", "DefTocharExplicit", true, true);
-        addTransform(defType, intType, "Def", "DefTointExplicit", true, true);
-        addTransform(defType, longType, "Def", "DefTolongExplicit", true, true);
-        addTransform(defType, floatType, "Def", "DefTofloatExplicit", true, true);
-        addTransform(defType, doubleType, "Def", "DefTodoubleExplicit", true, true);
-        addTransform(defType, byteobjType, "Def", "DefToByteExplicit", true, true);
-        addTransform(defType, shortobjType, "Def", "DefToShortExplicit", true, true);
-        addTransform(defType, charobjType, "Def", "DefToCharacterExplicit", true, true);
-        addTransform(defType, intobjType, "Def", "DefToIntegerExplicit", true, true);
-        addTransform(defType, longobjType, "Def", "DefToLongExplicit", true, true);
-        addTransform(defType, floatobjType, "Def", "DefToFloatExplicit", true, true);
-        addTransform(defType, doubleobjType, "Def", "DefToDoubleExplicit", true, true);
-
-        addTransform(numberType, byteType, "Number", "byteValue", false, true);
-        addTransform(numberType, shortType, "Number", "shortValue", false, true);
-        addTransform(numberType, charType, "Utility", "NumberTochar", true, true);
-        addTransform(numberType, intType, "Number", "intValue", false, true);
-        addTransform(numberType, longType, "Number", "longValue", false, true);
-        addTransform(numberType, floatType, "Number", "floatValue", false, true);
-        addTransform(numberType, doubleType, "Number", "doubleValue", false, true);
-        addTransform(numberType, booleanobjType, "Utility", "NumberToBoolean", true, true);
-        addTransform(numberType, byteobjType, "Utility", "NumberToByte", true, true);
-        addTransform(numberType, shortobjType, "Utility", "NumberToShort", true, true);
-        addTransform(numberType, charobjType, "Utility", "NumberToCharacter", true, true);
-        addTransform(numberType, intobjType, "Utility", "NumberToInteger", true, true);
-        addTransform(numberType, longobjType, "Utility", "NumberToLong", true, true);
-        addTransform(numberType, floatobjType, "Utility", "NumberToFloat", true, true);
-        addTransform(numberType, doubleobjType, "Utility", "NumberToDouble", true, true);
-
-        addTransform(booleanobjType, booleanType, "Boolean", "booleanValue", false, false);
-
-        addTransform(byteobjType, byteType, "Byte", "byteValue", false, false);
-        addTransform(byteobjType, shortType, "Byte", "shortValue", false, false);
-        addTransform(byteobjType, charType, "Utility", "ByteTochar", true, false);
-        addTransform(byteobjType, intType, "Byte", "intValue", false, false);
-        addTransform(byteobjType, longType, "Byte", "longValue", false, false);
-        addTransform(byteobjType, floatType, "Byte", "floatValue", false, false);
-        addTransform(byteobjType, doubleType, "Byte", "doubleValue", false, false);
-        addTransform(byteobjType, shortobjType, "Utility", "NumberToShort", true, false);
-        addTransform(byteobjType, charobjType, "Utility", "NumberToCharacter", true, false);
-        addTransform(byteobjType, intobjType, "Utility", "NumberToInteger", true, false);
-        addTransform(byteobjType, longobjType, "Utility", "NumberToLong", true, false);
-        addTransform(byteobjType, floatobjType, "Utility", "NumberToFloat", true, false);
-        addTransform(byteobjType, doubleobjType, "Utility", "NumberToDouble", true, false);
-
-        addTransform(shortobjType, byteType, "Short", "byteValue", false, true);
-        addTransform(shortobjType, shortType, "Short", "shortValue", false, true);
-        addTransform(shortobjType, charType, "Utility", "ShortTochar", true, false);
-        addTransform(shortobjType, intType, "Short", "intValue", false, false);
-        addTransform(shortobjType, longType, "Short", "longValue", false, false);
-        addTransform(shortobjType, floatType, "Short", "floatValue", false, false);
-        addTransform(shortobjType, doubleType, "Short", "doubleValue", false, false);
-        addTransform(shortobjType, byteobjType, "Utility", "NumberToByte", true, true);
-        addTransform(shortobjType, charobjType, "Utility", "NumberToCharacter", true, true);
-        addTransform(shortobjType, intobjType, "Utility", "NumberToInteger", true, false);
-        addTransform(shortobjType, longobjType, "Utility", "NumberToLong", true, false);
-        addTransform(shortobjType, floatobjType, "Utility", "NumberToFloat", true, false);
-        addTransform(shortobjType, doubleobjType, "Utility", "NumberToDouble", true, false);
-
-        addTransform(charobjType, byteType, "Utility", "CharacterTobyte", true, true);
-        addTransform(charobjType, shortType, "Utility", "CharacterToshort", true, false);
-        addTransform(charobjType, charType, "Character", "charValue", false, true);
-        addTransform(charobjType, intType, "Utility", "CharacterToint", true, false);
-        addTransform(charobjType, longType, "Utility", "CharacterTolong", true, false);
-        addTransform(charobjType, floatType, "Utility", "CharacterTofloat", true, false);
-        addTransform(charobjType, doubleType, "Utility", "CharacterTodouble", true, false);
-        addTransform(charobjType, byteobjType, "Utility", "CharacterToByte", true, true);
-        addTransform(charobjType, shortobjType, "Utility", "CharacterToShort", true, true);
-        addTransform(charobjType, intobjType, "Utility", "CharacterToInteger", true, false);
-        addTransform(charobjType, longobjType, "Utility", "CharacterToLong", true, false);
-        addTransform(charobjType, floatobjType, "Utility", "CharacterToFloat", true, false);
-        addTransform(charobjType, doubleobjType, "Utility", "CharacterToDouble", true, false);
-        addTransform(charobjType, stringType, "Utility", "CharacterToString", true, true);
-
-        addTransform(intobjType, byteType, "Integer", "byteValue", false, true);
-        addTransform(intobjType, shortType, "Integer", "shortValue", false, true);
-        addTransform(intobjType, charType, "Utility", "IntegerTochar", true, true);
-        addTransform(intobjType, intType, "Integer", "intValue", false, false);
-        addTransform(intobjType, longType, "Integer", "longValue", false, false);
-        addTransform(intobjType, floatType, "Integer", "floatValue", false, false);
-        addTransform(intobjType, doubleType, "Integer", "doubleValue", false, false);
-        addTransform(intobjType, byteobjType, "Utility", "NumberToByte", true, true);
-        addTransform(intobjType, shortobjType, "Utility", "NumberToShort", true, true);
-        addTransform(intobjType, charobjType, "Utility", "NumberToCharacter", true, true);
-        addTransform(intobjType, longobjType, "Utility", "NumberToLong", true, false);
-        addTransform(intobjType, floatobjType, "Utility", "NumberToFloat", true, false);
-        addTransform(intobjType, doubleobjType, "Utility", "NumberToDouble", true, false);
-
-        addTransform(longobjType, byteType, "Long", "byteValue", false, true);
-        addTransform(longobjType, shortType, "Long", "shortValue", false, true);
-        addTransform(longobjType, charType, "Utility", "LongTochar", true, true);
-        addTransform(longobjType, intType, "Long", "intValue", false, true);
-        addTransform(longobjType, longType, "Long", "longValue", false, false);
-        addTransform(longobjType, floatType, "Long", "floatValue", false, false);
-        addTransform(longobjType, doubleType, "Long", "doubleValue", false, false);
-        addTransform(longobjType, byteobjType, "Utility", "NumberToByte", true, true);
-        addTransform(longobjType, shortobjType, "Utility", "NumberToShort", true, true);
-        addTransform(longobjType, charobjType, "Utility", "NumberToCharacter", true, true);
-        addTransform(longobjType, intobjType, "Utility", "NumberToInteger", true, true);
-        addTransform(longobjType, floatobjType, "Utility", "NumberToFloat", true, false);
-        addTransform(longobjType, doubleobjType, "Utility", "NumberToDouble", true, false);
-
-        addTransform(floatobjType, byteType, "Float", "byteValue", false, true);
-        addTransform(floatobjType, shortType, "Float", "shortValue", false, true);
-        addTransform(floatobjType, charType, "Utility", "FloatTochar", true, true);
-        addTransform(floatobjType, intType, "Float", "intValue", false, true);
-        addTransform(floatobjType, longType, "Float", "longValue", false, true);
-        addTransform(floatobjType, floatType, "Float", "floatValue", false, false);
-        addTransform(floatobjType, doubleType, "Float", "doubleValue", false, false);
-        addTransform(floatobjType, byteobjType, "Utility", "NumberToByte", true, true);
-        addTransform(floatobjType, shortobjType, "Utility", "NumberToShort", true, true);
-        addTransform(floatobjType, charobjType, "Utility", "NumberToCharacter", true, true);
-        addTransform(floatobjType, intobjType, "Utility", "NumberToInteger", true, true);
-        addTransform(floatobjType, longobjType, "Utility", "NumberToLong", true, true);
-        addTransform(floatobjType, doubleobjType, "Utility", "NumberToDouble", true, false);
-
-        addTransform(doubleobjType, byteType, "Double", "byteValue", false, true);
-        addTransform(doubleobjType, shortType, "Double", "shortValue", false, true);
-        addTransform(doubleobjType, charType, "Utility", "DoubleTochar", true, true);
-        addTransform(doubleobjType, intType, "Double", "intValue", false, true);
-        addTransform(doubleobjType, longType, "Double", "longValue", false, true);
-        addTransform(doubleobjType, floatType, "Double", "floatValue", false, true);
-        addTransform(doubleobjType, doubleType, "Double", "doubleValue", false, false);
-        addTransform(doubleobjType, byteobjType, "Utility", "NumberToByte", true, true);
-        addTransform(doubleobjType, shortobjType, "Utility", "NumberToShort", true, true);
-        addTransform(doubleobjType, charobjType, "Utility", "NumberToCharacter", true, true);
-        addTransform(doubleobjType, intobjType, "Utility", "NumberToInteger", true, true);
-        addTransform(doubleobjType, longobjType, "Utility", "NumberToLong", true, true);
-        addTransform(doubleobjType, floatobjType, "Utility", "NumberToFloat", true, true);
-
-        addTransform(stringType, charType, "Utility", "StringTochar", true, true);
-        addTransform(stringType, charobjType, "Utility", "StringToCharacter", true, true);
-    }
-
-    private void addRuntimeClasses() {
-        addRuntimeClass(booleanType.struct);
-        addRuntimeClass(byteType.struct);
-        addRuntimeClass(shortType.struct);
-        addRuntimeClass(charType.struct);
-        addRuntimeClass(intType.struct);
-        addRuntimeClass(longType.struct);
-        addRuntimeClass(floatType.struct);
-        addRuntimeClass(doubleType.struct);
-
-        addRuntimeClass(booleanobjType.struct);
-        addRuntimeClass(byteobjType.struct);
-        addRuntimeClass(shortobjType.struct);
-        addRuntimeClass(charobjType.struct);
-        addRuntimeClass(intobjType.struct);
-        addRuntimeClass(longobjType.struct);
-        addRuntimeClass(floatobjType.struct);
-        addRuntimeClass(doubleobjType.struct);
-
-        addRuntimeClass(objectType.struct);
-        addRuntimeClass(numberType.struct);
-        addRuntimeClass(charseqType.struct);
-        addRuntimeClass(stringType.struct);
-
-        addRuntimeClass(oitrType.struct);
-        addRuntimeClass(ocollectionType.struct);
-        addRuntimeClass(olistType.struct);
-        addRuntimeClass(oarraylistType.struct);
-        addRuntimeClass(osetType.struct);
-        addRuntimeClass(ohashsetType.struct);
-        addRuntimeClass(oomapType.struct);
-        addRuntimeClass(oohashmapType.struct);
-
-        addRuntimeClass(exceptionType.struct);
-
-        addRuntimeClass(geoPointType.struct);
-        addRuntimeClass(stringsType.struct);
-        addRuntimeClass(longsType.struct);
-        addRuntimeClass(doublesType.struct);
-        addRuntimeClass(geoPointsType.struct);
-
-        addRuntimeClass(featureTestType.struct);
+        for (String file : DEFINITION_FILES) {
+            int currentLine = -1;
+            try {
+                try (InputStream stream = Definition.class.getResourceAsStream(file);
+                        LineNumberReader reader = new LineNumberReader(new InputStreamReader(stream, StandardCharsets.UTF_8))) {
+                    String line = null;
+                    String currentClass = null;
+                    while ((line = reader.readLine()) != null) {
+                        currentLine = reader.getLineNumber();
+                        line = line.trim();
+                        if (line.length() == 0 || line.charAt(0) == '#') {
+                            continue;
+                        } else if (line.startsWith("class ")) {
+                            assert currentClass == null;
+                            currentClass = line.split("\u0020")[1];
+                        } else if (line.equals("}")) {
+                            assert currentClass != null;
+                            currentClass = null;
+                        } else {
+                            assert currentClass != null;
+                            addSignature(currentClass, line);
+                        }
+                    }
+                }
+            } catch (Exception e) {
+                throw new RuntimeException("syntax error in " + file + ", line: " + currentLine, e);
+            }
+        }
     }
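(Aside: the new loop above replaces all of the hard-coded registrations with flat text resources. Each file is a sequence of class blocks whose body lines are handed to addSignature; a leading # starts a comment, blank lines are skipped, and a lone } closes the current class. The snippet below is illustrative only; the concrete class names and signatures live in the real resources referenced by DEFINITION_FILES:

--------------------------------------------------
# illustrative definition file; real contents differ
class ArrayList {
  ArrayList <init>()
}
class List {
  int getLength/size()
  Object get(int)
}
--------------------------------------------------

Given the parser, "ArrayList <init>()" registers a no-argument constructor, and "int getLength/size()" registers a method named getLength in scripts that is looked up reflectively under its Java name size.)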
 
     private final void addStruct(final String name, final Class<?> clazz) {
-        if (!name.matches("^[_a-zA-Z][<>,_a-zA-Z0-9]*$")) {
+        if (!name.matches("^[_a-zA-Z][\\.,_a-zA-Z0-9]*$")) {
             throw new IllegalArgumentException("Invalid struct name [" + name + "].");
         }
@@ -1599,9 +551,10 @@ public final class Definition {
         final Struct struct = new Struct(name, clazz, org.objectweb.asm.Type.getType(clazz));
 
         structsMap.put(name, struct);
+        simpleTypesMap.put(name, getTypeInternal(name));
     }
 
-    private final void addConstructor(final String struct, final String name, final Type[] args, final Type[] genargs) {
+    private final void addConstructorInternal(final String struct, final String name, final Type[] args) {
         final Struct owner = structsMap.get(struct);
 
         if (owner == null) {
@@ -1634,14 +587,6 @@ public final class Definition {
         final Class<?>[] classes = new Class<?>[args.length];
 
         for (int count = 0; count < classes.length; ++count) {
-            if (genargs != null) {
-                if (!args[count].clazz.isAssignableFrom(genargs[count].clazz)) {
-                    throw new ClassCastException("Generic argument [" + genargs[count].name + "]" +
-                        " is not a sub class of [" + args[count].name + "] in the constructor" +
-                        " [" + name + " ] from the struct [" + owner.name + "].");
-                }
-            }
-
             classes[count] = args[count].clazz;
         }
@@ -1655,79 +600,96 @@ public final class Definition {
         }
 
         final org.objectweb.asm.commons.Method asm = org.objectweb.asm.commons.Method.getMethod(reflect);
-        final Constructor constructor =
-            new Constructor(name, owner, Arrays.asList(genargs != null ? genargs : args), asm, reflect);
+        final Constructor constructor = new Constructor(name, owner, Arrays.asList(args), asm);
 
         owner.constructors.put(methodKey, constructor);
     }
 
-    private final void addMethod(final String struct, final String name, final String alias, final boolean statik,
-                                 final Type rtn, final Type[] args, final Type genrtn, final Type[] genargs) {
+    /**
+     * Adds a new signature to the definition.
+     * <p>
+     * Signatures have the following forms:
+     * <ul>
+     *   <li>{@code void method(String,int)}
+     *   <li>{@code boolean field}
+     *   <li>{@code Class <init>(String)}
+     * </ul>
+     * no spaces allowed.
+     */
+    private final void addSignature(String className, String signature) {
+        String elements[] = signature.split("\u0020");
+        if (elements.length != 2) {
+            throw new IllegalArgumentException("Malformed signature: " + signature);
+        }
+        // method or field type (e.g. return type)
+        Type rtn = getTypeInternal(elements[0]);
+        int parenIndex = elements[1].indexOf('(');
+        if (parenIndex != -1) {
+            // method or ctor
+            int parenEnd = elements[1].indexOf(')');
+            final Type args[];
+            if (parenEnd > parenIndex + 1) {
+                String arguments[] = elements[1].substring(parenIndex + 1, parenEnd).split(",");
+                args = new Type[arguments.length];
+                for (int i = 0; i < arguments.length; i++) {
+                    args[i] = getTypeInternal(arguments[i]);
+                }
+            } else {
+                args = new Type[0];
+            }
+            String methodName = elements[1].substring(0, parenIndex);
+            if (methodName.equals("<init>")) {
+                if (!elements[0].equals(className)) {
+                    throw new IllegalArgumentException("Constructors must return their own type");
+                }
+                addConstructorInternal(className, "new", args);
+            } else {
+                if (methodName.indexOf('/') >= 0) {
+                    String nameAndAlias[] = methodName.split("/");
+                    if (nameAndAlias.length != 2) {
+                        throw new IllegalArgumentException("Currently only two aliases are allowed!");
+                    }
+                    addMethodInternal(className, nameAndAlias[0], nameAndAlias[1], rtn, args);
+                } else {
+                    addMethodInternal(className, methodName, null, rtn, args);
+                }
+            }
+        } else {
+            // field
+            addFieldInternal(className, elements[1], null, rtn);
+        }
+    }
+
+    private final void addMethodInternal(final String struct, final String name, final String alias,
+                                         final Type rtn, final Type[] args) {
         final Struct owner = structsMap.get(struct);
 
         if (owner == null) {
             throw new IllegalArgumentException("Owner struct [" + struct + "] not defined" +
-                " for " + (statik ? "function" : "method") + " [" + name + "].");
+                " for method [" + name + "].");
         }
 
         if (!name.matches("^[_a-zA-Z][_a-zA-Z0-9]*$")) {
-            throw new IllegalArgumentException("Invalid " + (statik ? "static method" : "method") +
-                " name [" + name + "] with the struct [" + owner.name + "].");
+            throw new IllegalArgumentException("Invalid method name" +
                " [" + name + "] with the struct [" + owner.name + "].");
        }
 
        MethodKey methodKey = new MethodKey(name, args.length);
 
        if (owner.constructors.containsKey(methodKey)) {
"static methods" : "methods") + + throw new IllegalArgumentException("Constructors and methods" + " may not have the same signature [" + methodKey + "] within the same struct" + " [" + owner.name + "]."); } - if (owner.staticMethods.containsKey(methodKey)) { - if (statik) { - throw new IllegalArgumentException( - "Duplicate static method signature [" + methodKey + "] found within the struct [" + owner.name + "]."); - } else { - throw new IllegalArgumentException("Static methods and methods may not have the same signature" + - " [" + methodKey + "] within the same struct [" + owner.name + "]."); - } - } - - if (owner.methods.containsKey(methodKey)) { - if (statik) { - throw new IllegalArgumentException("Static methods and methods may not have the same signature" + - " [" + methodKey + "] within the same struct [" + owner.name + "]."); - } else { - throw new IllegalArgumentException("Duplicate method signature [" + methodKey + "]" + - " found within the struct [" + owner.name + "]."); - } - } - - if (genrtn != null) { - if (!rtn.clazz.isAssignableFrom(genrtn.clazz)) { - throw new ClassCastException("Generic return [" + genrtn.clazz.getCanonicalName() + "]" + - " is not a sub class of [" + rtn.clazz.getCanonicalName() + "] in the method" + - " [" + name + " ] from the struct [" + owner.name + "]."); - } - } - - if (genargs != null && genargs.length != args.length) { - throw new IllegalArgumentException("Generic arguments arity [" + genargs.length + "] is not the same as " + - (statik ? "function" : "method") + " [" + name + "] arguments arity" + - " [" + args.length + "] within the struct [" + owner.name + "]."); + if (owner.staticMethods.containsKey(methodKey) || owner.methods.containsKey(methodKey)) { + throw new IllegalArgumentException( + "Duplicate method signature [" + methodKey + "] found within the struct [" + owner.name + "]."); } final Class[] classes = new Class[args.length]; for (int count = 0; count < classes.length; ++count) { - if (genargs != null) { - if (!args[count].clazz.isAssignableFrom(genargs[count].clazz)) { - throw new ClassCastException("Generic argument [" + genargs[count].name + "] is not a sub class" + - " of [" + args[count].name + "] in the " + (statik ? "function" : "method") + - " [" + name + " ] from the struct [" + owner.name + "]."); - } - } - classes[count] = args[count].clazz; } @@ -1736,15 +698,15 @@ public final class Definition { try { reflect = owner.clazz.getMethod(alias == null ? name : alias, classes); } catch (final NoSuchMethodException exception) { - throw new IllegalArgumentException((statik ? "Function" : "Method") + - " [" + (alias == null ? name : alias) + "] not found for class [" + owner.clazz.getName() + "]" + + throw new IllegalArgumentException("Method [" + (alias == null ? name : alias) + + "] not found for class [" + owner.clazz.getName() + "]" + " with arguments " + Arrays.toString(classes) + "."); } if (!reflect.getReturnType().equals(rtn.clazz)) { throw new IllegalArgumentException("Specified return type class [" + rtn.clazz + "]" + - " does not match the found return type class [" + reflect.getReturnType() + "] for the " + - (statik ? "function" : "method") + " [" + name + "]" + + " does not match the found return type class [" + reflect.getReturnType() + "] for the" + + " method [" + name + "]" + " within the struct [" + owner.name + "]."); } @@ -1760,67 +722,33 @@ public final class Definition { " with arguments " + Arrays.toString(classes) + "."); } - final Method method = new Method(name, owner, genrtn != null ? 
-        final Method method = new Method(name, owner, genrtn != null ? genrtn : rtn,
-            Arrays.asList(genargs != null ? genargs : args), asm, reflect, handle);
         final int modifiers = reflect.getModifiers();
+        final Method method = new Method(name, owner, rtn, Arrays.asList(args), asm, modifiers, handle);
 
-        if (statik) {
-            if (!java.lang.reflect.Modifier.isStatic(modifiers)) {
-                throw new IllegalArgumentException("Function [" + name + "]" +
-                    " within the struct [" + owner.name + "] is not linked to a static Java method.");
-            }
-
+        if (java.lang.reflect.Modifier.isStatic(modifiers)) {
             owner.staticMethods.put(methodKey, method);
         } else {
-            if (java.lang.reflect.Modifier.isStatic(modifiers)) {
-                throw new IllegalArgumentException("Method [" + name + "]" +
-                    " within the struct [" + owner.name + "] is not linked to a non-static Java method.");
-            }
-
             owner.methods.put(methodKey, method);
         }
     }
 
-    private final void addField(final String struct, final String name, final String alias,
-                                final boolean statik, final Type type, final Type generic) {
+    private final void addFieldInternal(final String struct, final String name, final String alias,
+                                        final Type type) {
         final Struct owner = structsMap.get(struct);
 
         if (owner == null) {
             throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for " +
-                (statik ? "static" : "member") + " [" + name + "].");
+                " field [" + name + "].");
         }
 
         if (!name.matches("^[_a-zA-Z][_a-zA-Z0-9]*$")) {
-            throw new IllegalArgumentException("Invalid " + (statik ? "static" : "member") +
+            throw new IllegalArgumentException("Invalid field " +
                 " name [" + name + "] with the struct [" + owner.name + "].");
         }
 
-        if (owner.staticMembers.containsKey(name)) {
-            if (statik) {
-                throw new IllegalArgumentException("Duplicate static name [" + name + "]" +
-                    " found within the struct [" + owner.name + "].");
-            } else {
-                throw new IllegalArgumentException("Statics and members may not have the same name " +
-                    "[" + name + "] within the same struct [" + owner.name + "].");
-            }
-        }
-
-        if (owner.members.containsKey(name)) {
-            if (statik) {
-                throw new IllegalArgumentException("Statics and members may not have the same name " +
-                    "[" + name + "] within the same struct [" + owner.name + "].");
-            } else {
-                throw new IllegalArgumentException("Duplicate member name [" + name + "]" +
-                    " found within the struct [" + owner.name + "].");
-            }
-        }
-
-        if (generic != null) {
-            if (!type.clazz.isAssignableFrom(generic.clazz)) {
-                throw new ClassCastException("Generic type [" + generic.clazz.getCanonicalName() + "]" +
-                    " is not a sub class of [" + type.clazz.getCanonicalName() + "] for the field" +
-                    " [" + name + " ] from the struct [" + owner.name + "].");
-            }
+        if (owner.staticMembers.containsKey(name) || owner.members.containsKey(name)) {
+            throw new IllegalArgumentException("Duplicate field name [" + name + "]" +
+                " found within the struct [" + owner.name + "].");
         }
 
         java.lang.reflect.Field reflect;
@@ -1832,11 +760,14 @@ public final class Definition {
                 " not found for class [" + owner.clazz.getName() + "].");
         }
 
+        final int modifiers = reflect.getModifiers();
+        boolean isStatic = java.lang.reflect.Modifier.isStatic(modifiers);
+
         MethodHandle getter = null;
         MethodHandle setter = null;
 
         try {
-            if (!statik) {
+            if (!isStatic) {
                 getter = MethodHandles.publicLookup().unreflectGetter(reflect);
                 setter = MethodHandles.publicLookup().unreflectSetter(reflect);
             }
@@ -1845,42 +776,33 @@ public final class Definition {
                 " not found for class [" + owner.clazz.getName() + "].");
         }
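(Aside: the field path above hands out a getter/setter MethodHandle pair for instance fields, while static fields, which the change below requires to be final, get no handles at all and can be treated as constants. A hedged sketch of the same idea, with names of my own choosing:

--------------------------------------------------
// sketch only; mirrors the unreflectGetter/unreflectSetter pattern above
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;

final class FieldHandles {
    static MethodHandle[] lookup(Class<?> owner, String javaName) throws Exception {
        Field reflect = owner.getField(javaName);
        if (Modifier.isStatic(reflect.getModifiers())) {
            // static whitelisted fields must be final, so no handles needed
            return null;
        }
        MethodHandle getter = MethodHandles.publicLookup().unreflectGetter(reflect);
        MethodHandle setter = MethodHandles.publicLookup().unreflectSetter(reflect);
        return new MethodHandle[] { getter, setter };
    }
}
--------------------------------------------------
)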
type : generic, type, reflect, getter, setter); - final int modifiers = reflect.getModifiers(); - - if (statik) { - if (!java.lang.reflect.Modifier.isStatic(modifiers)) { - throw new IllegalArgumentException(); - } + final Field field = new Field(name, reflect.getName(), owner, type, modifiers, getter, setter); + if (isStatic) { + // require that all static fields are static final if (!java.lang.reflect.Modifier.isFinal(modifiers)) { throw new IllegalArgumentException("Static [" + name + "]" + - " within the struct [" + owner.name + "] is not linked to static Java field."); + " within the struct [" + owner.name + "] is not final."); } owner.staticMembers.put(alias == null ? name : alias, field); } else { - if (java.lang.reflect.Modifier.isStatic(modifiers)) { - throw new IllegalArgumentException("Member [" + name + "]" + - " within the struct [" + owner.name + "] is not linked to non-static Java field."); - } - owner.members.put(alias == null ? name : alias, field); } } - private final void copyStruct(final String struct, final String... children) { + private void copyStruct(String struct, List children) { final Struct owner = structsMap.get(struct); if (owner == null) { throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for copy."); } - for (int count = 0; count < children.length; ++count) { - final Struct child = structsMap.get(children[count]); + for (int count = 0; count < children.size(); ++count) { + final Struct child = structsMap.get(children.get(count)); - if (struct == null) { - throw new IllegalArgumentException("Child struct [" + children[count] + "]" + + if (child == null) { + throw new IllegalArgumentException("Child struct [" + children.get(count) + "]" + " not defined for copy to owner struct [" + owner.name + "]."); } @@ -1889,198 +811,24 @@ public final class Definition { " is not a super type of owner struct [" + owner.name + "] in copy."); } - final boolean object = child.clazz.equals(Object.class) && - java.lang.reflect.Modifier.isInterface(owner.clazz.getModifiers()); - for (Map.Entry kvPair : child.methods.entrySet()) { MethodKey methodKey = kvPair.getKey(); Method method = kvPair.getValue(); if (owner.methods.get(methodKey) == null) { - final Class clazz = object ? 
Object.class : owner.clazz; - - java.lang.reflect.Method reflect; - MethodHandle handle; - - try { - reflect = clazz.getMethod(method.method.getName(), method.reflect.getParameterTypes()); - } catch (final NoSuchMethodException exception) { - throw new IllegalArgumentException("Method [" + method.method.getName() + "] not found for" + - " class [" + owner.clazz.getName() + "] with arguments " + - Arrays.toString(method.reflect.getParameterTypes()) + "."); - } - - try { - handle = MethodHandles.publicLookup().in(owner.clazz).unreflect(reflect); - } catch (final IllegalAccessException exception) { - throw new IllegalArgumentException("Method [" + method.method.getName() + "] not found for" + - " class [" + owner.clazz.getName() + "] with arguments " + - Arrays.toString(method.reflect.getParameterTypes()) + "."); - } - owner.methods.put(methodKey, - new Method(method.name, owner, method.rtn, method.arguments, method.method, reflect, handle)); + new Method(method.name, owner, method.rtn, method.arguments, method.method, method.modifiers, method.handle)); } } - for (final Field field : child.members.values()) { + for (Field field : child.members.values()) { if (owner.members.get(field.name) == null) { - java.lang.reflect.Field reflect; - MethodHandle getter; - MethodHandle setter; - - try { - reflect = owner.clazz.getField(field.reflect.getName()); - } catch (final NoSuchFieldException exception) { - throw new IllegalArgumentException("Field [" + field.reflect.getName() + "]" + - " not found for class [" + owner.clazz.getName() + "]."); - } - - try { - getter = MethodHandles.publicLookup().unreflectGetter(reflect); - setter = MethodHandles.publicLookup().unreflectSetter(reflect); - } catch (final IllegalAccessException exception) { - throw new IllegalArgumentException("Getter/Setter [" + field.name + "]" + - " not found for class [" + owner.clazz.getName() + "]."); - } - owner.members.put(field.name, - new Field(field.name, owner, field.type, field.generic, reflect, getter, setter)); + new Field(field.name, field.javaName, owner, field.type, field.modifiers, field.getter, field.setter)); } } } } - private final void addTransform(final Type from, final Type to, final boolean explicit) { - if (from.equals(to)) { - throw new IllegalArgumentException("Transform cannot" + - " have cast type from [" + from.name + "] be the same as cast type to [" + to.name + "]."); - } - - if (!from.sort.primitive || !to.sort.primitive) { - throw new IllegalArgumentException("Only transforms between two primitives may be a simple cast, but" + - "found [" + from.name + "] and [" + to.name + "]."); - } - - final Cast cast = new Cast(from, to, explicit); - - if (transformsMap.containsKey(cast)) { - throw new IllegalArgumentException("Transform with " + - " cast type from [" + from.name + "] to cast type to [" + to.name + "] already defined."); - } - - transformsMap.put(cast, cast); - } - - private final void addTransform(final Type from, final Type to, final String struct, - final String name, final boolean statik, final boolean explicit) { - final Struct owner = structsMap.get(struct); - - if (owner == null) { - throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for" + - " transform with cast type from [" + from.name + "] and cast type to [" + to.name + "]."); - } - - if (from.equals(to)) { - throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "] cannot" + - " have cast type from [" + from.name + "] be the same as cast type to [" + to.name + "]."); - } - - final 
Cast cast = new Cast(from, to, explicit); - - if (transformsMap.containsKey(cast)) { - throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + "] already defined."); - } - - final Cast transform; - - final Method method; - Type upcast = null; - Type downcast = null; - - // transforms are implicitly arity of 0, unless a static method where its 1 (receiver passed) - final MethodKey methodKey = new MethodKey(name, statik ? 1 : 0); - - if (statik) { - method = owner.staticMethods.get(methodKey); - - if (method == null) { - throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + - "] using a function [" + name + "] that is not defined."); - } - - if (method.arguments.size() != 1) { - throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + - "] using function [" + name + "] does not have a single type argument."); - } - - Type argument = method.arguments.get(0); - - if (!argument.clazz.isAssignableFrom(from.clazz)) { - if (from.clazz.isAssignableFrom(argument.clazz)) { - upcast = argument; - } else { - throw new ClassCastException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + "] using" + - " function [" + name + "] cannot cast from type to the function input argument type."); - } - } - - final Type rtn = method.rtn; - - if (!to.clazz.isAssignableFrom(rtn.clazz)) { - if (rtn.clazz.isAssignableFrom(to.clazz)) { - downcast = to; - } else { - throw new ClassCastException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + "] using" + - " function [" + name + "] cannot cast to type to the function return argument type."); - } - } - } else { - method = owner.methods.get(methodKey); - - if (method == null) { - throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + - "] using a method [" + name + "] that is not defined."); - } - - if (!method.arguments.isEmpty()) { - throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + - "] using method [" + name + "] does not have a single type argument."); - } - - if (!owner.clazz.isAssignableFrom(from.clazz)) { - if (from.clazz.isAssignableFrom(owner.clazz)) { - upcast = getType(owner.name); - } else { - throw new ClassCastException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + "] using" + - " method [" + name + "] cannot cast from type to the method input argument type."); - } - } - - final Type rtn = method.rtn; - - if (!to.clazz.isAssignableFrom(rtn.clazz)) { - if (rtn.clazz.isAssignableFrom(to.clazz)) { - downcast = to; - } else { - throw new ClassCastException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + "]" + - " using method [" + name + "] cannot cast to type to the method return argument type."); - } - } - } - - transform = new Transform(cast, method, upcast, downcast); - transformsMap.put(cast, transform); - } - /** * 
Precomputes a more efficient structure for dynamic method/field access. */ @@ -2132,41 +880,48 @@ public final class Definition { runtimeMap.put(struct.clazz, new RuntimeClass(methods, getters, setters)); } - public final Type getType(final String name) { - final int dimensions = getDimensions(name); - final String structstr = dimensions == 0 ? name : name.substring(0, name.indexOf('[')); - final Struct struct = structsMap.get(structstr); + private Type getTypeInternal(String name) { + // simple types (e.g. 0 array dimensions) are a simple hash lookup for speed + Type simple = simpleTypesMap.get(name); + + if (simple != null) { + return simple; + } + + int dimensions = getDimensions(name); + String structstr = dimensions == 0 ? name : name.substring(0, name.indexOf('[')); + Struct struct = structsMap.get(structstr); if (struct == null) { throw new IllegalArgumentException("The struct with name [" + name + "] has not been defined."); } - return getType(struct, dimensions); + return getTypeInternal(struct, dimensions); } - public final Type getType(final Struct struct, final int dimensions) { + private Type getTypeInternal(Struct struct, int dimensions) { String name = struct.name; org.objectweb.asm.Type type = struct.type; Class clazz = struct.clazz; Sort sort; if (dimensions > 0) { - final StringBuilder builder = new StringBuilder(name); - final char[] brackets = new char[dimensions]; + StringBuilder builder = new StringBuilder(name); + char[] brackets = new char[dimensions]; for (int count = 0; count < dimensions; ++count) { builder.append("[]"); brackets[count] = '['; } - final String descriptor = new String(brackets) + struct.type.getDescriptor(); + String descriptor = new String(brackets) + struct.type.getDescriptor(); name = builder.toString(); type = org.objectweb.asm.Type.getType(descriptor); try { clazz = Class.forName(type.getInternalName().replace('/', '.')); - } catch (final ClassNotFoundException exception) { + } catch (ClassNotFoundException exception) { throw new IllegalArgumentException("The class [" + type.getInternalName() + "]" + " could not be found to create type [" + name + "]."); } @@ -2177,7 +932,7 @@ public final class Definition { } else { sort = Sort.OBJECT; - for (final Sort value : Sort.values()) { + for (Sort value : Sort.values()) { if (value.clazz == null) { continue; } @@ -2193,12 +948,12 @@ public final class Definition { return new Type(name, dimensions, struct, clazz, type, sort); } - private int getDimensions(final String name) { + private int getDimensions(String name) { int dimensions = 0; int index = name.indexOf('['); if (index != -1) { - final int length = name.length(); + int length = name.length(); while (index < length) { if (name.charAt(index) == '[' && ++index < length && name.charAt(index++) == ']') { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java index fbb1d246a83..90c02b7e801 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java @@ -21,7 +21,6 @@ package org.elasticsearch.painless; import org.elasticsearch.painless.Definition.Cast; import org.elasticsearch.painless.Definition.Sort; -import org.elasticsearch.painless.Definition.Transform; import org.elasticsearch.painless.Definition.Type; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.Label; @@ -34,10 +33,7 @@ import 
java.util.ArrayList; import java.util.Deque; import java.util.List; -import static org.elasticsearch.painless.WriterConstants.ADDEXACT_INT; -import static org.elasticsearch.painless.WriterConstants.ADDEXACT_LONG; -import static org.elasticsearch.painless.WriterConstants.ADDWOOVERLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.ADDWOOVERLOW_FLOAT; +import static org.elasticsearch.painless.WriterConstants.CHAR_TO_STRING; import static org.elasticsearch.painless.WriterConstants.DEF_ADD_CALL; import static org.elasticsearch.painless.WriterConstants.DEF_AND_CALL; import static org.elasticsearch.painless.WriterConstants.DEF_DIV_CALL; @@ -47,21 +43,27 @@ import static org.elasticsearch.painless.WriterConstants.DEF_OR_CALL; import static org.elasticsearch.painless.WriterConstants.DEF_REM_CALL; import static org.elasticsearch.painless.WriterConstants.DEF_RSH_CALL; import static org.elasticsearch.painless.WriterConstants.DEF_SUB_CALL; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_BOOLEAN; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_BYTE_EXPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_BYTE_IMPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_CHAR_EXPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_CHAR_IMPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_DOUBLE_EXPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_DOUBLE_IMPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_FLOAT_EXPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_FLOAT_IMPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_INT_EXPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_INT_IMPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_LONG_EXPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_LONG_IMPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_SHORT_EXPLICIT; +import static org.elasticsearch.painless.WriterConstants.DEF_TO_SHORT_IMPLICIT; import static org.elasticsearch.painless.WriterConstants.DEF_USH_CALL; +import static org.elasticsearch.painless.WriterConstants.DEF_UTIL_TYPE; import static org.elasticsearch.painless.WriterConstants.DEF_XOR_CALL; -import static org.elasticsearch.painless.WriterConstants.DIVWOOVERLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.DIVWOOVERLOW_FLOAT; -import static org.elasticsearch.painless.WriterConstants.DIVWOOVERLOW_INT; -import static org.elasticsearch.painless.WriterConstants.DIVWOOVERLOW_LONG; import static org.elasticsearch.painless.WriterConstants.INDY_STRING_CONCAT_BOOTSTRAP_HANDLE; import static org.elasticsearch.painless.WriterConstants.MAX_INDY_STRING_CONCAT_ARGS; -import static org.elasticsearch.painless.WriterConstants.MULEXACT_INT; -import static org.elasticsearch.painless.WriterConstants.MULEXACT_LONG; -import static org.elasticsearch.painless.WriterConstants.MULWOOVERLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.MULWOOVERLOW_FLOAT; import static org.elasticsearch.painless.WriterConstants.PAINLESS_ERROR_TYPE; -import static org.elasticsearch.painless.WriterConstants.REMWOOVERLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.REMWOOVERLOW_FLOAT; import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_BOOLEAN; import static 
org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_CHAR; import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_DOUBLE; @@ -73,29 +75,9 @@ import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_ST import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_CONSTRUCTOR; import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_TOSTRING; import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_TYPE; +import static org.elasticsearch.painless.WriterConstants.STRING_TO_CHAR; import static org.elasticsearch.painless.WriterConstants.STRING_TYPE; -import static org.elasticsearch.painless.WriterConstants.SUBEXACT_INT; -import static org.elasticsearch.painless.WriterConstants.SUBEXACT_LONG; -import static org.elasticsearch.painless.WriterConstants.SUBWOOVERLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.SUBWOOVERLOW_FLOAT; -import static org.elasticsearch.painless.WriterConstants.TOBYTEEXACT_INT; -import static org.elasticsearch.painless.WriterConstants.TOBYTEEXACT_LONG; -import static org.elasticsearch.painless.WriterConstants.TOBYTEWOOVERFLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.TOBYTEWOOVERFLOW_FLOAT; -import static org.elasticsearch.painless.WriterConstants.TOCHAREXACT_INT; -import static org.elasticsearch.painless.WriterConstants.TOCHAREXACT_LONG; -import static org.elasticsearch.painless.WriterConstants.TOCHARWOOVERFLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.TOCHARWOOVERFLOW_FLOAT; -import static org.elasticsearch.painless.WriterConstants.TOFLOATWOOVERFLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.TOINTEXACT_LONG; -import static org.elasticsearch.painless.WriterConstants.TOINTWOOVERFLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.TOINTWOOVERFLOW_FLOAT; -import static org.elasticsearch.painless.WriterConstants.TOLONGWOOVERFLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.TOLONGWOOVERFLOW_FLOAT; -import static org.elasticsearch.painless.WriterConstants.TOSHORTEXACT_INT; -import static org.elasticsearch.painless.WriterConstants.TOSHORTEXACT_LONG; -import static org.elasticsearch.painless.WriterConstants.TOSHORTWOOVERFLOW_DOUBLE; -import static org.elasticsearch.painless.WriterConstants.TOSHORTWOOVERFLOW_FLOAT; +import static org.elasticsearch.painless.WriterConstants.UTILITY_TYPE; /** * Extension of {@link GeneratorAdapter} with some utility methods. 
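A note on the import swap above: the overflow-guarded math helpers (the ADDEXACT_*, *WOOVERLOW_* and TO*EXACT_* constants) are gone, and def values are now converted through per-primitive DEF_TO_*_IMPLICIT/EXPLICIT calls instead. The sketch below illustrates the semantics that the implicit/explicit split encodes; the method names are hypothetical stand-ins, since the real targets are resolved through WriterConstants and invoked via ASM rather than called directly:

// Sketch only: illustrates implicit vs. explicit def-to-primitive conversion.
// Neither method exists in Painless; they stand in for the DEF_TO_BYTE_* targets.
public final class DefCastSketch {

    /** Implicit: never narrows, so only an actual Byte is accepted. */
    public static byte defToByteImplicit(Object value) {
        if (value instanceof Byte) {
            return (Byte) value;
        }
        throw new ClassCastException("cannot implicitly cast [" + value.getClass() + "] to [byte]");
    }

    /** Explicit: behaves like a Java cast, so narrowing (with truncation) is allowed. */
    public static byte defToByteExplicit(Object value) {
        if (value instanceof Number) {
            return ((Number) value).byteValue();
        }
        throw new ClassCastException("cannot explicitly cast [" + value.getClass() + "] to [byte]");
    }

    public static void main(String[] args) {
        System.out.println(defToByteExplicit(1000));      // -24: truncated like (byte) 1000
        System.out.println(defToByteImplicit((byte) 7));  // 7
        // defToByteImplicit(1000) would throw: implicit conversions never narrow
    }
}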
@@ -132,49 +114,84 @@ public final class MethodWriter extends GeneratorAdapter { visitVarInsn(Opcodes.ILOAD, slot); push(0); ifICmp(GeneratorAdapter.GT, end); - throwException(PAINLESS_ERROR_TYPE, - "The maximum number of statements that can be executed in a loop has been reached."); + throwException(PAINLESS_ERROR_TYPE, "The maximum number of statements that can be executed in a loop has been reached."); mark(end); } } public void writeCast(final Cast cast) { - if (cast instanceof Transform) { - final Transform transform = (Transform)cast; - - if (transform.upcast != null) { - checkCast(transform.upcast.type); - } - - if (java.lang.reflect.Modifier.isStatic(transform.method.reflect.getModifiers())) { - invokeStatic(transform.method.owner.type, transform.method.method); - } else if (java.lang.reflect.Modifier.isInterface(transform.method.owner.clazz.getModifiers())) { - invokeInterface(transform.method.owner.type, transform.method.method); - } else { - invokeVirtual(transform.method.owner.type, transform.method.method); - } - - if (transform.downcast != null) { - checkCast(transform.downcast.type); - } - } else if (cast != null) { + if (cast != null) { final Type from = cast.from; final Type to = cast.to; - if (from.equals(to)) { - return; - } - - if (from.sort.numeric && from.sort.primitive && to.sort.numeric && to.sort.primitive) { - cast(from.type, to.type); - } else { - if (!to.clazz.isAssignableFrom(from.clazz)) { - checkCast(to.type); + if (from.sort == Sort.CHAR && to.sort == Sort.STRING) { + invokeStatic(UTILITY_TYPE, CHAR_TO_STRING); + } else if (from.sort == Sort.STRING && to.sort == Sort.CHAR) { + invokeStatic(UTILITY_TYPE, STRING_TO_CHAR); + } else if (cast.unboxFrom) { + if (from.sort == Sort.DEF) { + if (cast.explicit) { + if (to.sort == Sort.BOOL) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BOOLEAN); + else if (to.sort == Sort.BYTE) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BYTE_EXPLICIT); + else if (to.sort == Sort.SHORT) invokeStatic(DEF_UTIL_TYPE, DEF_TO_SHORT_EXPLICIT); + else if (to.sort == Sort.CHAR) invokeStatic(DEF_UTIL_TYPE, DEF_TO_CHAR_EXPLICIT); + else if (to.sort == Sort.INT) invokeStatic(DEF_UTIL_TYPE, DEF_TO_INT_EXPLICIT); + else if (to.sort == Sort.LONG) invokeStatic(DEF_UTIL_TYPE, DEF_TO_LONG_EXPLICIT); + else if (to.sort == Sort.FLOAT) invokeStatic(DEF_UTIL_TYPE, DEF_TO_FLOAT_EXPLICIT); + else if (to.sort == Sort.DOUBLE) invokeStatic(DEF_UTIL_TYPE, DEF_TO_DOUBLE_EXPLICIT); + else throw new IllegalStateException("Illegal tree structure."); + } else { + if (to.sort == Sort.BOOL) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BOOLEAN); + else if (to.sort == Sort.BYTE) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BYTE_IMPLICIT); + else if (to.sort == Sort.SHORT) invokeStatic(DEF_UTIL_TYPE, DEF_TO_SHORT_IMPLICIT); + else if (to.sort == Sort.CHAR) invokeStatic(DEF_UTIL_TYPE, DEF_TO_CHAR_IMPLICIT); + else if (to.sort == Sort.INT) invokeStatic(DEF_UTIL_TYPE, DEF_TO_INT_IMPLICIT); + else if (to.sort == Sort.LONG) invokeStatic(DEF_UTIL_TYPE, DEF_TO_LONG_IMPLICIT); + else if (to.sort == Sort.FLOAT) invokeStatic(DEF_UTIL_TYPE, DEF_TO_FLOAT_IMPLICIT); + else if (to.sort == Sort.DOUBLE) invokeStatic(DEF_UTIL_TYPE, DEF_TO_DOUBLE_IMPLICIT); + else throw new IllegalStateException("Illegal tree structure."); + } + } else { + unbox(from.type); + writeCast(from, to); } + } else if (cast.unboxTo) { + writeCast(from, to); + unbox(to.type); + } else if (cast.boxFrom) { + box(from.type); + writeCast(from, to); + } else if (cast.boxTo) { + writeCast(from, to); + box(to.type); + } else { + writeCast(from, to); } } } + 
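The public writeCast above only chooses an ordering: whether boxing or unboxing happens before or after the bare primitive or reference cast that the private helper below emits. A rough translation of the four paths into ordinary Java, with illustrative names rather than Painless API:

// Sketch of the four box/unbox orderings writeCast distinguishes.
public final class CastOrderSketch {
    public static void main(String[] args) {
        Integer boxed = 42;

        // unboxFrom: unbox first, then apply the primitive cast (Integer -> int -> long)
        long widened = (long) boxed.intValue();

        // unboxTo: cast on the reference side first, then unbox (Object -> Long -> long)
        long unboxed = ((Long) (Object) 7L).longValue();

        // boxFrom: box first, then continue as a reference cast (int -> Integer -> Object)
        Object asDef = Integer.valueOf(13);

        // boxTo: apply the primitive cast first, then box the result (int -> short -> Short)
        Short narrowed = (short) 40000;  // truncates to -25536 before boxing

        System.out.println(widened + " " + unboxed + " " + asDef + " " + narrowed);
    }
}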
private void writeCast(final Type from, final Type to) { + if (from.equals(to)) { + return; + } + + if (from.sort.numeric && from.sort.primitive && to.sort.numeric && to.sort.primitive) { + cast(from.type, to.type); + } else { + if (!to.clazz.isAssignableFrom(from.clazz)) { + checkCast(to.type); + } + } + } + + /** + * Proxy the box method to use valueOf instead to ensure that the modern boxing methods are used. + */ + @Override + public void box(org.objectweb.asm.Type type) { + valueOf(type); + } + public void writeBranch(final Label tru, final Label fals) { if (tru != null) { visitJumpInsn(Opcodes.IFNE, tru); @@ -182,7 +199,7 @@ public final class MethodWriter extends GeneratorAdapter { visitJumpInsn(Opcodes.IFEQ, fals); } } - + public void writeNewStrings() { if (INDY_STRING_CONCAT_BOOTSTRAP_HANDLE != null) { // Java 9+: we just push our argument collector onto deque @@ -236,231 +253,51 @@ public final class MethodWriter extends GeneratorAdapter { } } - public void writeBinaryInstruction(final CompilerSettings settings, final Definition definition, - final String location, - final Type type, final Operation operation) { + public void writeBinaryInstruction(final String location, final Type type, final Operation operation) { final Sort sort = type.sort; - boolean exact = !settings.getNumericOverflow() && - ((sort == Sort.INT || sort == Sort.LONG) && - (operation == Operation.MUL || operation == Operation.DIV || - operation == Operation.ADD || operation == Operation.SUB) || - (sort == Sort.FLOAT || sort == Sort.DOUBLE) && - (operation == Operation.MUL || operation == Operation.DIV || operation == Operation.REM || - operation == Operation.ADD || operation == Operation.SUB)); - if (exact) { - switch (sort) { - case INT: - switch (operation) { - case MUL: invokeStatic(definition.mathType.type, MULEXACT_INT); break; - case DIV: invokeStatic(definition.utilityType.type, DIVWOOVERLOW_INT); break; - case ADD: invokeStatic(definition.mathType.type, ADDEXACT_INT); break; - case SUB: invokeStatic(definition.mathType.type, SUBEXACT_INT); break; - } + if ((sort == Sort.FLOAT || sort == Sort.DOUBLE) && + (operation == Operation.LSH || operation == Operation.USH || + operation == Operation.RSH || operation == Operation.BWAND || + operation == Operation.XOR || operation == Operation.BWOR)) { + throw new IllegalStateException("Error " + location + ": Illegal tree structure."); + } - break; - case LONG: - switch (operation) { - case MUL: invokeStatic(definition.mathType.type, MULEXACT_LONG); break; - case DIV: invokeStatic(definition.utilityType.type, DIVWOOVERLOW_LONG); break; - case ADD: invokeStatic(definition.mathType.type, ADDEXACT_LONG); break; - case SUB: invokeStatic(definition.mathType.type, SUBEXACT_LONG); break; - } - - break; - case FLOAT: - switch (operation) { - case MUL: invokeStatic(definition.utilityType.type, MULWOOVERLOW_FLOAT); break; - case DIV: invokeStatic(definition.utilityType.type, DIVWOOVERLOW_FLOAT); break; - case REM: invokeStatic(definition.utilityType.type, REMWOOVERLOW_FLOAT); break; - case ADD: invokeStatic(definition.utilityType.type, ADDWOOVERLOW_FLOAT); break; - case SUB: invokeStatic(definition.utilityType.type, SUBWOOVERLOW_FLOAT); break; - default: - throw new IllegalStateException("Error " + location + ": Illegal tree structure."); - } - - break; - case DOUBLE: - switch (operation) { - case MUL: invokeStatic(definition.utilityType.type, MULWOOVERLOW_DOUBLE); break; - case DIV: invokeStatic(definition.utilityType.type, DIVWOOVERLOW_DOUBLE); break; - case REM: 
invokeStatic(definition.utilityType.type, REMWOOVERLOW_DOUBLE); break; - case ADD: invokeStatic(definition.utilityType.type, ADDWOOVERLOW_DOUBLE); break; - case SUB: invokeStatic(definition.utilityType.type, SUBWOOVERLOW_DOUBLE); break; - default: - throw new IllegalStateException("Error " + location + ": Illegal tree structure."); - } - - break; + if (sort == Sort.DEF) { + switch (operation) { + case MUL: invokeStatic(DEF_UTIL_TYPE, DEF_MUL_CALL); break; + case DIV: invokeStatic(DEF_UTIL_TYPE, DEF_DIV_CALL); break; + case REM: invokeStatic(DEF_UTIL_TYPE, DEF_REM_CALL); break; + case ADD: invokeStatic(DEF_UTIL_TYPE, DEF_ADD_CALL); break; + case SUB: invokeStatic(DEF_UTIL_TYPE, DEF_SUB_CALL); break; + case LSH: invokeStatic(DEF_UTIL_TYPE, DEF_LSH_CALL); break; + case USH: invokeStatic(DEF_UTIL_TYPE, DEF_RSH_CALL); break; + case RSH: invokeStatic(DEF_UTIL_TYPE, DEF_USH_CALL); break; + case BWAND: invokeStatic(DEF_UTIL_TYPE, DEF_AND_CALL); break; + case XOR: invokeStatic(DEF_UTIL_TYPE, DEF_XOR_CALL); break; + case BWOR: invokeStatic(DEF_UTIL_TYPE, DEF_OR_CALL); break; default: throw new IllegalStateException("Error " + location + ": Illegal tree structure."); } } else { - if ((sort == Sort.FLOAT || sort == Sort.DOUBLE) && - (operation == Operation.LSH || operation == Operation.USH || - operation == Operation.RSH || operation == Operation.BWAND || - operation == Operation.XOR || operation == Operation.BWOR)) { - throw new IllegalStateException("Error " + location + ": Illegal tree structure."); - } - - if (sort == Sort.DEF) { - switch (operation) { - case MUL: invokeStatic(definition.defobjType.type, DEF_MUL_CALL); break; - case DIV: invokeStatic(definition.defobjType.type, DEF_DIV_CALL); break; - case REM: invokeStatic(definition.defobjType.type, DEF_REM_CALL); break; - case ADD: invokeStatic(definition.defobjType.type, DEF_ADD_CALL); break; - case SUB: invokeStatic(definition.defobjType.type, DEF_SUB_CALL); break; - case LSH: invokeStatic(definition.defobjType.type, DEF_LSH_CALL); break; - case USH: invokeStatic(definition.defobjType.type, DEF_RSH_CALL); break; - case RSH: invokeStatic(definition.defobjType.type, DEF_USH_CALL); break; - case BWAND: invokeStatic(definition.defobjType.type, DEF_AND_CALL); break; - case XOR: invokeStatic(definition.defobjType.type, DEF_XOR_CALL); break; - case BWOR: invokeStatic(definition.defobjType.type, DEF_OR_CALL); break; - default: - throw new IllegalStateException("Error " + location + ": Illegal tree structure."); - } - } else { - switch (operation) { - case MUL: math(GeneratorAdapter.MUL, type.type); break; - case DIV: math(GeneratorAdapter.DIV, type.type); break; - case REM: math(GeneratorAdapter.REM, type.type); break; - case ADD: math(GeneratorAdapter.ADD, type.type); break; - case SUB: math(GeneratorAdapter.SUB, type.type); break; - case LSH: math(GeneratorAdapter.SHL, type.type); break; - case USH: math(GeneratorAdapter.USHR, type.type); break; - case RSH: math(GeneratorAdapter.SHR, type.type); break; - case BWAND: math(GeneratorAdapter.AND, type.type); break; - case XOR: math(GeneratorAdapter.XOR, type.type); break; - case BWOR: math(GeneratorAdapter.OR, type.type); break; - default: - throw new IllegalStateException("Error " + location + ": Illegal tree structure."); - } + switch (operation) { + case MUL: math(GeneratorAdapter.MUL, type.type); break; + case DIV: math(GeneratorAdapter.DIV, type.type); break; + case REM: math(GeneratorAdapter.REM, type.type); break; + case ADD: math(GeneratorAdapter.ADD, type.type); break; + case SUB: 
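// A sketch of the pattern in the def branch above: each operator lowers to a single
// static call on the def utility class, so a def '+' compiles to roughly
//     invokeStatic(DEF_UTIL_TYPE, DEF_ADD_CALL);  // pops two Objects, pushes the result
// while this primitive branch emits the plain JVM arithmetic opcode via GeneratorAdapter.math().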
math(GeneratorAdapter.SUB, type.type); break; + case LSH: math(GeneratorAdapter.SHL, type.type); break; + case USH: math(GeneratorAdapter.USHR, type.type); break; + case RSH: math(GeneratorAdapter.SHR, type.type); break; + case BWAND: math(GeneratorAdapter.AND, type.type); break; + case XOR: math(GeneratorAdapter.XOR, type.type); break; + case BWOR: math(GeneratorAdapter.OR, type.type); break; + default: + throw new IllegalStateException("Error " + location + ": Illegal tree structure."); } } } - /** - * Called for any compound assignment (including increment/decrement instructions). - * We have to be stricter than writeBinary and do overflow checks against the original type's size - * instead of the promoted type's size, since the result will be implicitly cast back. - * - * @return This will be true if an instruction is written, false otherwise. - */ - public boolean writeExactInstruction( - final Definition definition, final Sort fsort, final Sort tsort) { - if (fsort == Sort.DOUBLE) { - if (tsort == Sort.FLOAT) { - invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE); - } else if (tsort == Sort.FLOAT_OBJ) { - invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE); - checkCast(definition.floatobjType.type); - } else if (tsort == Sort.LONG) { - invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE); - } else if (tsort == Sort.LONG_OBJ) { - invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE); - checkCast(definition.longobjType.type); - } else if (tsort == Sort.INT) { - invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE); - } else if (tsort == Sort.INT_OBJ) { - invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE); - checkCast(definition.intobjType.type); - } else if (tsort == Sort.CHAR) { - invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_DOUBLE); - } else if (tsort == Sort.CHAR_OBJ) { - invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_DOUBLE); - checkCast(definition.charobjType.type); - } else if (tsort == Sort.SHORT) { - invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE); - } else if (tsort == Sort.SHORT_OBJ) { - invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE); - checkCast(definition.shortobjType.type); - } else if (tsort == Sort.BYTE) { - invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE); - } else if (tsort == Sort.BYTE_OBJ) { - invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE); - checkCast(definition.byteobjType.type); - } else { - return false; - } - } else if (fsort == Sort.FLOAT) { - if (tsort == Sort.LONG) { - invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_FLOAT); - } else if (tsort == Sort.LONG_OBJ) { - invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_FLOAT); - checkCast(definition.longobjType.type); - } else if (tsort == Sort.INT) { - invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT); - } else if (tsort == Sort.INT_OBJ) { - invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT); - checkCast(definition.intobjType.type); - } else if (tsort == Sort.CHAR) { - invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT); - } else if (tsort == Sort.CHAR_OBJ) { - invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT); - checkCast(definition.charobjType.type); - } else if (tsort == Sort.SHORT) { - invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_FLOAT); - } else if (tsort == Sort.SHORT_OBJ) { - invokeStatic(definition.utilityType.type, 
                TOSHORTWOOVERFLOW_FLOAT);
-                checkCast(definition.shortobjType.type);
-            } else if (tsort == Sort.BYTE) {
-                invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_FLOAT);
-            } else if (tsort == Sort.BYTE_OBJ) {
-                invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_FLOAT);
-                checkCast(definition.byteobjType.type);
-            } else {
-                return false;
-            }
-        } else if (fsort == Sort.LONG) {
-            if (tsort == Sort.INT) {
-                invokeStatic(definition.mathType.type, TOINTEXACT_LONG);
-            } else if (tsort == Sort.INT_OBJ) {
-                invokeStatic(definition.mathType.type, TOINTEXACT_LONG);
-                checkCast(definition.intobjType.type);
-            } else if (tsort == Sort.CHAR) {
-                invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG);
-            } else if (tsort == Sort.CHAR_OBJ) {
-                invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG);
-                checkCast(definition.charobjType.type);
-            } else if (tsort == Sort.SHORT) {
-                invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG);
-            } else if (tsort == Sort.SHORT_OBJ) {
-                invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG);
-                checkCast(definition.shortobjType.type);
-            } else if (tsort == Sort.BYTE) {
-                invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG);
-            } else if (tsort == Sort.BYTE_OBJ) {
-                invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG);
-                checkCast(definition.byteobjType.type);
-            } else {
-                return false;
-            }
-        } else if (fsort == Sort.INT) {
-            if (tsort == Sort.CHAR) {
-                invokeStatic(definition.utilityType.type, TOCHAREXACT_INT);
-            } else if (tsort == Sort.CHAR_OBJ) {
-                invokeStatic(definition.utilityType.type, TOCHAREXACT_INT);
-                checkCast(definition.charobjType.type);
-            } else if (tsort == Sort.SHORT) {
-                invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT);
-            } else if (tsort == Sort.SHORT_OBJ) {
-                invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT);
-                checkCast(definition.shortobjType.type);
-            } else if (tsort == Sort.BYTE) {
-                invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT);
-            } else if (tsort == Sort.BYTE_OBJ) {
-                invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT);
-                checkCast(definition.byteobjType.type);
-            } else {
-                return false;
-            }
-        } else {
-            return false;
-        }
-
-        return true;
-    }
-
     public void writeDup(final int size, final int xsize) {
         if (size == 1) {
             if (xsize == 2) {
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java
index e5998948d62..67f889b7a72 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java
@@ -19,9 +19,9 @@
 package org.elasticsearch.painless;
+
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.script.ScriptEngineRegistry;
-import org.elasticsearch.script.ScriptMode;
 import org.elasticsearch.script.ScriptModule;
 
 /**
@@ -29,6 +29,11 @@ import org.elasticsearch.script.ScriptModule;
  */
 public final class PainlessPlugin extends Plugin {
+    // force to parse our definition at startup (not on the user's first script)
+    static {
+        Definition.VOID_TYPE.hashCode();
+    }
+
     @Override
     public String name() {
         return "lang-painless";
     }
@@ -41,6 +46,6 @@ public final class PainlessPlugin extends Plugin {
     public void onModule(final ScriptModule module) {
         module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(
-            PainlessScriptEngineService.class, PainlessScriptEngineService.NAME, ScriptMode.ON));
+
PainlessScriptEngineService.class, PainlessScriptEngineService.NAME, true)); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngineService.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngineService.java index dafc6aaba8a..f9ee949e3d4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngineService.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngineService.java @@ -38,9 +38,7 @@ import java.security.AccessController; import java.security.Permissions; import java.security.PrivilegedAction; import java.security.ProtectionDomain; -import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Map; /** @@ -117,18 +115,18 @@ public final class PainlessScriptEngineService extends AbstractComponent impleme // Use custom settings specified by params. compilerSettings = new CompilerSettings(); Map copy = new HashMap<>(params); - String value = copy.remove(CompilerSettings.NUMERIC_OVERFLOW); - - if (value != null) { - compilerSettings.setNumericOverflow(Boolean.parseBoolean(value)); - } - - value = copy.remove(CompilerSettings.MAX_LOOP_COUNTER); + String value = copy.remove(CompilerSettings.MAX_LOOP_COUNTER); if (value != null) { compilerSettings.setMaxLoopCounter(Integer.parseInt(value)); } + value = copy.remove(CompilerSettings.PICKY); + + if (value != null) { + compilerSettings.setPicky(Boolean.parseBoolean(value)); + } + if (!copy.isEmpty()) { throw new IllegalArgumentException("Unrecognized compile-time parameter(s): " + copy); } @@ -212,7 +210,7 @@ public final class PainlessScriptEngineService extends AbstractComponent impleme * Action taken when the engine is closed. */ @Override - public void close() throws IOException { + public void close() { // Nothing to do. } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Utility.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Utility.java index 32641649827..5ab3450db7e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Utility.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Utility.java @@ -25,442 +25,10 @@ package org.elasticsearch.painless; */ public class Utility { - public static boolean NumberToboolean(final Number value) { - return value.longValue() != 0; - } - - public static char NumberTochar(final Number value) { - return (char)value.intValue(); - } - - public static Boolean NumberToBoolean(final Number value) { - return value.longValue() != 0; - } - - public static Byte NumberToByte(final Number value) { - return value == null ? null : value.byteValue(); - } - - public static Short NumberToShort(final Number value) { - return value == null ? null : value.shortValue(); - } - - public static Character NumberToCharacter(final Number value) { - return value == null ? null : (char)value.intValue(); - } - - public static Integer NumberToInteger(final Number value) { - return value == null ? null : value.intValue(); - } - - public static Long NumberToLong(final Number value) { - return value == null ? null : value.longValue(); - } - - public static Float NumberToFloat(final Number value) { - return value == null ? null : value.floatValue(); - } - - public static Double NumberToDouble(final Number value) { - return value == null ? null : value.doubleValue(); - } - - public static byte booleanTobyte(final boolean value) { - return (byte)(value ? 
1 : 0); - } - - public static short booleanToshort(final boolean value) { - return (short)(value ? 1 : 0); - } - - public static char booleanTochar(final boolean value) { - return (char)(value ? 1 : 0); - } - - public static int booleanToint(final boolean value) { - return value ? 1 : 0; - } - - public static long booleanTolong(final boolean value) { - return value ? 1 : 0; - } - - public static float booleanTofloat(final boolean value) { - return value ? 1 : 0; - } - - public static double booleanTodouble(final boolean value) { - return value ? 1 : 0; - } - - public static Integer booleanToInteger(final boolean value) { - return value ? 1 : 0; - } - - public static byte BooleanTobyte(final Boolean value) { - return (byte)(value ? 1 : 0); - } - - public static short BooleanToshort(final Boolean value) { - return (short)(value ? 1 : 0); - } - - public static char BooleanTochar(final Boolean value) { - return (char)(value ? 1 : 0); - } - - public static int BooleanToint(final Boolean value) { - return value ? 1 : 0; - } - - public static long BooleanTolong(final Boolean value) { - return value ? 1 : 0; - } - - public static float BooleanTofloat(final Boolean value) { - return value ? 1 : 0; - } - - public static double BooleanTodouble(final Boolean value) { - return value ? 1 : 0; - } - - public static Byte BooleanToByte(final Boolean value) { - return value == null ? null : (byte)(value ? 1 : 0); - } - - public static Short BooleanToShort(final Boolean value) { - return value == null ? null : (short)(value ? 1 : 0); - } - - public static Character BooleanToCharacter(final Boolean value) { - return value == null ? null : (char)(value ? 1 : 0); - } - - public static Integer BooleanToInteger(final Boolean value) { - return value == null ? null : value ? 1 : 0; - } - - public static Long BooleanToLong(final Boolean value) { - return value == null ? null : value ? 1L : 0L; - } - - public static Float BooleanToFloat(final Boolean value) { - return value == null ? null : value ? 1F : 0F; - } - - public static Double BooleanToDouble(final Boolean value) { - return value == null ? null : value ? 
1D : 0D; - } - - public static boolean byteToboolean(final byte value) { - return value != 0; - } - - public static Short byteToShort(final byte value) { - return (short)value; - } - - public static Character byteToCharacter(final byte value) { - return (char)value; - } - - public static Integer byteToInteger(final byte value) { - return (int)value; - } - - public static Long byteToLong(final byte value) { - return (long)value; - } - - public static Float byteToFloat(final byte value) { - return (float)value; - } - - public static Double byteToDouble(final byte value) { - return (double)value; - } - - public static boolean ByteToboolean(final Byte value) { - return value != 0; - } - - public static char ByteTochar(final Byte value) { - return (char)value.byteValue(); - } - - public static boolean shortToboolean(final short value) { - return value != 0; - } - - public static Byte shortToByte(final short value) { - return (byte)value; - } - - public static Character shortToCharacter(final short value) { - return (char)value; - } - - public static Integer shortToInteger(final short value) { - return (int)value; - } - - public static Long shortToLong(final short value) { - return (long)value; - } - - public static Float shortToFloat(final short value) { - return (float)value; - } - - public static Double shortToDouble(final short value) { - return (double)value; - } - - public static boolean ShortToboolean(final Short value) { - return value != 0; - } - - public static char ShortTochar(final Short value) { - return (char)value.shortValue(); - } - - public static boolean charToboolean(final char value) { - return value != 0; - } - - public static Byte charToByte(final char value) { - return (byte)value; - } - - public static Short charToShort(final char value) { - return (short)value; - } - - public static Integer charToInteger(final char value) { - return (int)value; - } - - public static Long charToLong(final char value) { - return (long)value; - } - - public static Float charToFloat(final char value) { - return (float)value; - } - - public static Double charToDouble(final char value) { - return (double)value; - } - public static String charToString(final char value) { return String.valueOf(value); } - public static boolean CharacterToboolean(final Character value) { - return value != 0; - } - - public static byte CharacterTobyte(final Character value) { - return (byte)value.charValue(); - } - - public static short CharacterToshort(final Character value) { - return (short)value.charValue(); - } - - public static int CharacterToint(final Character value) { - return value; - } - - public static long CharacterTolong(final Character value) { - return value; - } - - public static float CharacterTofloat(final Character value) { - return value; - } - - public static double CharacterTodouble(final Character value) { - return value; - } - - public static Boolean CharacterToBoolean(final Character value) { - return value == null ? null : value != 0; - } - - public static Byte CharacterToByte(final Character value) { - return value == null ? null : (byte)value.charValue(); - } - - public static Short CharacterToShort(final Character value) { - return value == null ? null : (short)value.charValue(); - } - - public static Integer CharacterToInteger(final Character value) { - return value == null ? null : (int)value; - } - - public static Long CharacterToLong(final Character value) { - return value == null ? 
null : (long)value; - } - - public static Float CharacterToFloat(final Character value) { - return value == null ? null : (float)value; - } - - public static Double CharacterToDouble(final Character value) { - return value == null ? null : (double)value; - } - - public static String CharacterToString(final Character value) { - return value == null ? null : value.toString(); - } - - public static boolean intToboolean(final int value) { - return value != 0; - } - - public static Byte intToByte(final int value) { - return (byte)value; - } - - public static Short intToShort(final int value) { - return (short)value; - } - - public static Character intToCharacter(final int value) { - return (char)value; - } - - public static Long intToLong(final int value) { - return (long)value; - } - - public static Float intToFloat(final int value) { - return (float)value; - } - - public static Double intToDouble(final int value) { - return (double)value; - } - - public static boolean IntegerToboolean(final Integer value) { - return value != 0; - } - - public static char IntegerTochar(final Integer value) { - return (char)value.intValue(); - } - - public static boolean longToboolean(final long value) { - return value != 0; - } - - public static Byte longToByte(final long value) { - return (byte)value; - } - - public static Short longToShort(final long value) { - return (short)value; - } - - public static Character longToCharacter(final long value) { - return (char)value; - } - - public static Integer longToInteger(final long value) { - return (int)value; - } - - public static Float longToFloat(final long value) { - return (float)value; - } - - public static Double longToDouble(final long value) { - return (double)value; - } - - public static boolean LongToboolean(final Long value) { - return value != 0; - } - - public static char LongTochar(final Long value) { - return (char)value.longValue(); - } - - public static boolean floatToboolean(final float value) { - return value != 0; - } - - public static Byte floatToByte(final float value) { - return (byte)value; - } - - public static Short floatToShort(final float value) { - return (short)value; - } - - public static Character floatToCharacter(final float value) { - return (char)value; - } - - public static Integer floatToInteger(final float value) { - return (int)value; - } - - public static Long floatToLong(final float value) { - return (long)value; - } - - public static Double floatToDouble(final float value) { - return (double)value; - } - - public static boolean FloatToboolean(final Float value) { - return value != 0; - } - - public static char FloatTochar(final Float value) { - return (char)value.floatValue(); - } - - public static boolean doubleToboolean(final double value) { - return value != 0; - } - - public static Byte doubleToByte(final double value) { - return (byte)value; - } - - public static Short doubleToShort(final double value) { - return (short)value; - } - - public static Character doubleToCharacter(final double value) { - return (char)value; - } - - public static Integer doubleToInteger(final double value) { - return (int)value; - } - - public static Long doubleToLong(final double value) { - return (long)value; - } - - public static Float doubleToFloat(final double value) { - return (float)value; - } - - public static boolean DoubleToboolean(final Double value) { - return value != 0; - } - - public static char DoubleTochar(final Double value) { - return (char)value.doubleValue(); - } - public static char StringTochar(final String value) { 
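        // A usage sketch for the two bridges that survive in this class: Painless treats a
        // one-character String and a char as interchangeable.
        //     char c = Utility.StringTochar("a");    // 'a'
        //     String s = Utility.charToString('b');  // "b"
        //     Utility.StringTochar("ab");            // throws ClassCastException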
if (value.length() != 1) { throw new ClassCastException("Cannot cast [String] with length greater than one to [char]."); @@ -469,359 +37,6 @@ public class Utility { return value.charAt(0); } - public static Character StringToCharacter(final String value) { - if (value == null) { - return null; - } - - if (value.length() != 1) { - throw new ClassCastException("Cannot cast [String] with length greater than one to [Character]."); - } - - return value.charAt(0); - } - - // although divide by zero is guaranteed, the special overflow case is not caught. - // its not needed for remainder because it is not possible there. - // see https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.17.2 - - /** - * Integer divide without overflow - * @throws ArithmeticException on overflow or divide-by-zero - */ - public static int divideWithoutOverflow(int x, int y) { - if (x == Integer.MIN_VALUE && y == -1) { - throw new ArithmeticException("integer overflow"); - } - return x / y; - } - - /** - * Long divide without overflow - * @throws ArithmeticException on overflow or divide-by-zero - */ - public static long divideWithoutOverflow(long x, long y) { - if (x == Long.MIN_VALUE && y == -1L) { - throw new ArithmeticException("long overflow"); - } - return x / y; - } - - // byte, short, and char are promoted to int for normal operations, - // so the JDK exact methods are typically used, and the result has a wider range. - // but compound assignments and increment/decrement operators (e.g. byte b = Byte.MAX_VALUE; b++;) - // implicitly cast back to the original type: so these need to be checked against the original range. - - /** - * Like {@link Math#toIntExact(long)} but for byte range. - */ - public static byte toByteExact(int value) { - byte s = (byte) value; - if (s != value) { - throw new ArithmeticException("byte overflow"); - } - return s; - } - - /** - * Like {@link Math#toIntExact(long)} but for byte range. - */ - public static byte toByteExact(long value) { - byte s = (byte) value; - if (s != value) { - throw new ArithmeticException("byte overflow"); - } - return s; - } - - /** - * Like {@link Math#toIntExact(long)} but for byte range. - */ - public static byte toByteWithoutOverflow(float value) { - if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) { - throw new ArithmeticException("byte overflow"); - } - return (byte)value; - } - - /** - * Like {@link Math#toIntExact(long)} but for byte range. - */ - public static byte toByteWithoutOverflow(double value) { - if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) { - throw new ArithmeticException("byte overflow"); - } - return (byte)value; - } - - /** - * Like {@link Math#toIntExact(long)} but for short range. - */ - public static short toShortExact(int value) { - short s = (short) value; - if (s != value) { - throw new ArithmeticException("short overflow"); - } - return s; - } - - /** - * Like {@link Math#toIntExact(long)} but for short range. - */ - public static short toShortExact(long value) { - short s = (short) value; - if (s != value) { - throw new ArithmeticException("short overflow"); - } - return s; - } - - /** - * Like {@link Math#toIntExact(long)} but for short range. - */ - public static short toShortWithoutOverflow(float value) { - if (value < Short.MIN_VALUE || value > Short.MAX_VALUE) { - throw new ArithmeticException("short overflow"); - } - return (short)value; - } - - /** - * Like {@link Math#toIntExact(long)} but for short range. 
- */ - public static short toShortExact(double value) { - if (value < Short.MIN_VALUE || value > Short.MAX_VALUE) { - throw new ArithmeticException("short overflow"); - } - return (short)value; - } - - /** - * Like {@link Math#toIntExact(long)} but for char range. - */ - public static char toCharExact(int value) { - char s = (char) value; - if (s != value) { - throw new ArithmeticException("char overflow"); - } - return s; - } - - /** - * Like {@link Math#toIntExact(long)} but for char range. - */ - public static char toCharExact(long value) { - char s = (char) value; - if (s != value) { - throw new ArithmeticException("char overflow"); - } - return s; - } - - /** - * Like {@link Math#toIntExact(long)} but for char range. - */ - public static char toCharWithoutOverflow(float value) { - if (value < Character.MIN_VALUE || value > Character.MAX_VALUE) { - throw new ArithmeticException("char overflow"); - } - return (char)value; - } - - /** - * Like {@link Math#toIntExact(long)} but for char range. - */ - public static char toCharWithoutOverflow(double value) { - if (value < Character.MIN_VALUE || value > Character.MAX_VALUE) { - throw new ArithmeticException("char overflow"); - } - return (char)value; - } - - /** - * Like {@link Math#toIntExact(long)} but for int range. - */ - public static int toIntWithoutOverflow(float value) { - if (value < Integer.MIN_VALUE || value > Integer.MAX_VALUE) { - throw new ArithmeticException("int overflow"); - } - return (int)value; - } - - /** - * Like {@link Math#toIntExact(long)} but for int range. - */ - public static int toIntWithoutOverflow(double value) { - if (value < Integer.MIN_VALUE || value > Integer.MAX_VALUE) { - throw new ArithmeticException("int overflow"); - } - return (int)value; - } - - /** - * Like {@link Math#toIntExact(long)} but for long range. - */ - public static long toLongWithoutOverflow(float value) { - if (value < Long.MIN_VALUE || value > Long.MAX_VALUE) { - throw new ArithmeticException("long overflow"); - } - return (long)value; - } - - /** - * Like {@link Math#toIntExact(long)} but for long range. - */ - public static float toLongWithoutOverflow(double value) { - if (value < Long.MIN_VALUE || value > Long.MAX_VALUE) { - throw new ArithmeticException("long overflow"); - } - return (long)value; - } - - /** - * Like {@link Math#toIntExact(long)} but for float range. 
- */ - public static float toFloatWithoutOverflow(double value) { - if (value < Float.MIN_VALUE || value > Float.MAX_VALUE) { - throw new ArithmeticException("float overflow"); - } - return (float)value; - } - - /** - * Checks for overflow, result is infinite but operands are finite - * @throws ArithmeticException if overflow occurred - */ - private static float checkInfFloat(float x, float y, float z) { - if (Float.isInfinite(z)) { - if (Float.isFinite(x) && Float.isFinite(y)) { - throw new ArithmeticException("float overflow"); - } - } - return z; - } - - /** - * Checks for NaN, result is NaN but operands are finite - * @throws ArithmeticException if overflow occurred - */ - private static float checkNaNFloat(float x, float y, float z) { - if (Float.isNaN(z)) { - if (Float.isFinite(x) && Float.isFinite(y)) { - throw new ArithmeticException("NaN"); - } - } - return z; - } - - /** - * Checks for NaN, result is infinite but operands are finite - * @throws ArithmeticException if overflow occurred - */ - private static double checkInfDouble(double x, double y, double z) { - if (Double.isInfinite(z)) { - if (Double.isFinite(x) && Double.isFinite(y)) { - throw new ArithmeticException("double overflow"); - } - } - return z; - } - - /** - * Checks for NaN, result is NaN but operands are finite - * @throws ArithmeticException if overflow occurred - */ - private static double checkNaNDouble(double x, double y, double z) { - if (Double.isNaN(z)) { - if (Double.isFinite(x) && Double.isFinite(y)) { - throw new ArithmeticException("NaN"); - } - } - return z; - } - - /** - * Adds two floats but throws {@code ArithmeticException} - * if the result overflows. - */ - public static float addWithoutOverflow(float x, float y) { - return checkInfFloat(x, y, x + y); - } - - /** - * Adds two doubles but throws {@code ArithmeticException} - * if the result overflows. - */ - public static double addWithoutOverflow(double x, double y) { - return checkInfDouble(x, y, x + y); - } - - /** - * Subtracts two floats but throws {@code ArithmeticException} - * if the result overflows. - */ - public static float subtractWithoutOverflow(float x, float y) { - return checkInfFloat(x, y, x - y); - } - - /** - * Subtracts two doubles but throws {@code ArithmeticException} - * if the result overflows. - */ - public static double subtractWithoutOverflow(double x, double y) { - return checkInfDouble(x, y , x - y); - } - - /** - * Multiplies two floats but throws {@code ArithmeticException} - * if the result overflows. - */ - public static float multiplyWithoutOverflow(float x, float y) { - return checkInfFloat(x, y, x * y); - } - - /** - * Multiplies two doubles but throws {@code ArithmeticException} - * if the result overflows. 
-     */
-    public static double multiplyWithoutOverflow(double x, double y) {
-        return checkInfDouble(x, y, x * y);
-    }
-
-    /**
-     * Divides two floats but throws {@code ArithmeticException}
-     * if the result overflows, or would create NaN from finite
-     * inputs ({@code x == 0, y == 0})
-     */
-    public static float divideWithoutOverflow(float x, float y) {
-        return checkNaNFloat(x, y, checkInfFloat(x, y, x / y));
-    }
-
-    /**
-     * Divides two doubles but throws {@code ArithmeticException}
-     * if the result overflows, or would create NaN from finite
-     * inputs ({@code x == 0, y == 0})
-     */
-    public static double divideWithoutOverflow(double x, double y) {
-        return checkNaNDouble(x, y, checkInfDouble(x, y, x / y));
-    }
-
-    /**
-     * Takes the remainder of two floats but throws {@code ArithmeticException}
-     * if the result would create NaN from finite inputs ({@code y == 0})
-     */
-    public static float remainderWithoutOverflow(float x, float y) {
-        return checkNaNFloat(x, y, x % y);
-    }
-
-    /**
-     * Takes the remainder of two doubles but throws {@code ArithmeticException}
-     * if the result would create NaN from finite inputs ({@code y == 0})
-     */
-    public static double remainderWithoutOverflow(double x, double y) {
-        return checkNaNDouble(x, y, x % y);
-    }
-
     public static boolean checkEquals(final Object left, final Object right) {
         if (left != null) {
             return left.equals(right);
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Variables.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Variables.java
index adf930b017c..4443b1dc0f8 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Variables.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Variables.java
@@ -50,7 +50,7 @@ public final class Variables {
         boolean ctx = false;
         boolean loop = false;
 
-        public void markReserved(final String name) {
+        public void markReserved(String name) {
            if (SCORE.equals(name)) {
                score = true;
            } else if (CTX.equals(name)) {
@@ -58,7 +58,7 @@ public final class Variables {
            }
        }
 
-        public boolean isReserved(final String name) {
+        public boolean isReserved(String name) {
            return name.equals(THIS) || name.equals(PARAMS) || name.equals(SCORER) || name.equals(DOC) ||
                name.equals(VALUE) || name.equals(SCORE) || name.equals(CTX) || name.equals(LOOP);
        }
@@ -77,7 +77,7 @@ public final class Variables {
 
        public boolean read = false;
 
-        private Variable(final String location, final String name, final Type type, final int slot, final boolean readonly) {
+        private Variable(String location, String name, Type type, int slot, boolean readonly) {
            this.location = location;
            this.name = name;
            this.type = type;
@@ -86,14 +86,12 @@ public final class Variables {
        }
    }
 
-    private final Definition definition;
    final Reserved reserved;
 
    private final Deque<Integer> scopes = new ArrayDeque<>();
    private final Deque<Variable> variables = new ArrayDeque<>();
 
-    public Variables(final CompilerSettings settings, final Definition definition, final Reserved reserved) {
-        this.definition = definition;
+    public Variables(Reserved reserved) {
        this.reserved = reserved;
 
        incrementScope();
@@ -101,35 +99,35 @@ public final class Variables {
        // Method variables.
 
        // This reference.  Internal use only.
-        addVariable("[" + Reserved.THIS + "]" , definition.execType.name, Reserved.THIS , true, true);
+        addVariable("[" + Reserved.THIS + "]", Definition.getType("Executable"), Reserved.THIS, true, true);
 
-        // Input map of variables passed to the script. TODO: Rename to 'params' since that will be its use.
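The Utility.java methods being deleted above all rely on the same idiom: IEEE 754 float arithmetic never throws, so overflow is detected after the fact by checking whether finite operands produced a non-finite result. A minimal, self-contained sketch of that idiom (the class and method names here are illustrative, not part of this change):

----------------------------------------------------------------------
// Overflow detection in the style of the removed addWithoutOverflow:
// the addition itself cannot fail, so the result is inspected afterwards.
public final class OverflowSketch {
    static float addWithoutOverflow(float x, float y) {
        float z = x + y;
        // An infinite result from finite operands means the exact sum fell
        // outside the float range.
        if (Float.isInfinite(z) && Float.isFinite(x) && Float.isFinite(y)) {
            throw new ArithmeticException("float overflow");
        }
        return z;
    }

    public static void main(String[] args) {
        System.out.println(addWithoutOverflow(1e30f, 1e30f));  // prints 2.0E30
        addWithoutOverflow(Float.MAX_VALUE, Float.MAX_VALUE);  // throws ArithmeticException
    }
}
----------------------------------------------------------------------

The checkNaN* helpers apply the same trick to NaN: a NaN produced from finite inputs (for example 0f / 0f) is turned into an ArithmeticException instead of propagating silently.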
- addVariable("[" + Reserved.PARAMS + "]", definition.smapType.name, Reserved.PARAMS, true, true); + // Input map of variables passed to the script. + addVariable("[" + Reserved.PARAMS + "]", Definition.getType("Map"), Reserved.PARAMS, true, true); // Scorer parameter passed to the script. Internal use only. - addVariable("[" + Reserved.SCORER + "]", definition.defType.name , Reserved.SCORER, true, true); + addVariable("[" + Reserved.SCORER + "]", Definition.DEF_TYPE, Reserved.SCORER, true, true); - // Doc parameter passed to the script. TODO: Currently working as a Map, we can do better? - addVariable("[" + Reserved.DOC + "]" , definition.smapType.name, Reserved.DOC , true, true); + // Doc parameter passed to the script. TODO: Currently working as a Map, we can do better? + addVariable("[" + Reserved.DOC + "]", Definition.getType("Map"), Reserved.DOC, true, true); // Aggregation _value parameter passed to the script. - addVariable("[" + Reserved.VALUE + "]" , definition.defType.name , Reserved.VALUE , true, true); + addVariable("[" + Reserved.VALUE + "]", Definition.DEF_TYPE, Reserved.VALUE, true, true); // Shortcut variables. // Document's score as a read-only double. if (reserved.score) { - addVariable("[" + Reserved.SCORE + "]", definition.doubleType.name, Reserved.SCORE, true, true); + addVariable("[" + Reserved.SCORE + "]", Definition.DOUBLE_TYPE, Reserved.SCORE, true, true); } // The ctx map set by executable scripts as a read-only map. if (reserved.ctx) { - addVariable("[" + Reserved.CTX + "]", definition.smapType.name, Reserved.CTX, true, true); + addVariable("[" + Reserved.CTX + "]", Definition.getType("Map"), Reserved.CTX, true, true); } // Loop counter to catch infinite loops. Internal use only. - if (reserved.loop && settings.getMaxLoopCounter() > 0) { - addVariable("[" + Reserved.LOOP + "]", definition.intType.name, Reserved.LOOP, true, true); + if (reserved.loop) { + addVariable("[" + Reserved.LOOP + "]", Definition.INT_TYPE, Reserved.LOOP, true, true); } } @@ -141,7 +139,7 @@ public final class Variables { int remove = scopes.pop(); while (remove > 0) { - final Variable variable = variables.pop(); + Variable variable = variables.pop(); if (variable.read) { throw new IllegalArgumentException("Error [" + variable.location + "]: Variable [" + variable.name + "] never used."); @@ -151,11 +149,11 @@ public final class Variables { } } - public Variable getVariable(final String location, final String name) { - final Iterator itr = variables.iterator(); + public Variable getVariable(String location, String name) { + Iterator itr = variables.iterator(); while (itr.hasNext()) { - final Variable variable = itr.next(); + Variable variable = itr.next(); if (variable.name.equals(name)) { return variable; @@ -169,8 +167,7 @@ public final class Variables { return null; } - public Variable addVariable(final String location, final String typestr, final String name, - final boolean readonly, final boolean reserved) { + public Variable addVariable(String location, Type type, String name, boolean readonly, boolean reserved) { if (!reserved && this.reserved.isReserved(name)) { throw new IllegalArgumentException("Error " + location + ": Variable name [" + name + "] is reserved."); } @@ -179,38 +176,23 @@ public final class Variables { throw new IllegalArgumentException("Error " + location + ": Variable name [" + name + "] already defined."); } - final Type type; - try { - type = definition.getType(typestr); - } catch (final IllegalArgumentException exception) { - throw new 
IllegalArgumentException("Error " + location + ": Not a type [" + typestr + "]."); - } - - boolean legal = !name.contains("<"); - - try { - definition.getType(name); - legal = false; - } catch (final IllegalArgumentException exception) { + Definition.getType(name); + } catch (IllegalArgumentException exception) { // Do nothing. } - if (!legal) { - throw new IllegalArgumentException("Error " + location + ": Variable name [" + name + "] cannot be a type."); - } - - final Variable previous = variables.peekFirst(); + Variable previous = variables.peekFirst(); int slot = 0; if (previous != null) { slot = previous.slot + previous.type.type.getSize(); } - final Variable variable = new Variable(location, name, type, slot, readonly); + Variable variable = new Variable(location, name, type, slot, readonly); variables.push(variable); - final int update = scopes.pop() + 1; + int update = scopes.pop() + 1; scopes.push(update); return variable; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java index 449361867b9..6e2d0e1431b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Writer.java @@ -38,15 +38,14 @@ import static org.elasticsearch.painless.WriterConstants.MAP_TYPE; */ final class Writer { - static byte[] write(final CompilerSettings settings, final Definition definition, + static byte[] write(final CompilerSettings settings, String name, final String source, final Variables variables, final SSource root) { - final Writer writer = new Writer(settings, definition, name, source, variables, root); + final Writer writer = new Writer(settings, name, source, variables, root); return writer.getBytes(); } private final CompilerSettings settings; - private final Definition definition; private final String scriptName; private final String source; private final Variables variables; @@ -55,10 +54,9 @@ final class Writer { private final ClassWriter writer; private final MethodWriter adapter; - private Writer(final CompilerSettings settings, final Definition definition, + private Writer(final CompilerSettings settings, String name, final String source, final Variables variables, final SSource root) { this.settings = settings; - this.definition = definition; this.scriptName = name; this.source = source; this.variables = variables; @@ -117,7 +115,7 @@ final class Writer { // if we truncated, make it obvious if (limit != source.length()) { fileName.append(" ..."); - } + } fileName.append(" @ "); } else { // its a named script, just use the name @@ -177,7 +175,7 @@ final class Writer { adapter.visitVarInsn(Opcodes.ISTORE, loop.slot); } - root.write(settings, definition, adapter); + root.write(adapter); adapter.endMethod(); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java index 6bdb9856114..410c06e6fd7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java @@ -27,7 +27,6 @@ import org.objectweb.asm.Type; import org.objectweb.asm.commons.Method; import java.lang.invoke.CallSite; -import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; import java.lang.invoke.MethodType; import java.util.Map; @@ -39,7 +38,7 @@ public 
final class WriterConstants { public final static String BASE_CLASS_NAME = Executable.class.getName(); public final static Type BASE_CLASS_TYPE = Type.getType(Executable.class); - + public final static String CLASS_NAME = BASE_CLASS_NAME + "$Script"; public final static Type CLASS_TYPE = Type.getObjectType(CLASS_NAME.replace('.', '/')); @@ -56,6 +55,10 @@ public final class WriterConstants { public final static Type MAP_TYPE = Type.getType(Map.class); public final static Method MAP_GET = getAsmMethod(Object.class, "get", Object.class); + public final static Type UTILITY_TYPE = Type.getType(Utility.class); + public final static Method STRING_TO_CHAR = getAsmMethod(char.class, "StringTochar", String.class); + public final static Method CHAR_TO_STRING = getAsmMethod(String.class, "charToString", char.class); + /** dynamic callsite bootstrap signature */ public final static MethodType DEF_BOOTSTRAP_TYPE = MethodType.methodType(CallSite.class, MethodHandles.Lookup.class, String.class, MethodType.class, int.class); @@ -63,19 +66,35 @@ public final class WriterConstants { new Handle(Opcodes.H_INVOKESTATIC, Type.getInternalName(DefBootstrap.class), "bootstrap", DEF_BOOTSTRAP_TYPE.toMethodDescriptorString()); - public final static Method DEF_NOT_CALL = getAsmMethod(Object.class, "not", Object.class); - public final static Method DEF_NEG_CALL = getAsmMethod(Object.class, "neg", Object.class); - public final static Method DEF_MUL_CALL = getAsmMethod(Object.class, "mul", Object.class, Object.class); - public final static Method DEF_DIV_CALL = getAsmMethod(Object.class, "div", Object.class, Object.class); - public final static Method DEF_REM_CALL = getAsmMethod(Object.class, "rem", Object.class, Object.class); - public final static Method DEF_ADD_CALL = getAsmMethod(Object.class, "add", Object.class, Object.class); - public final static Method DEF_SUB_CALL = getAsmMethod(Object.class, "sub", Object.class, Object.class); - public final static Method DEF_LSH_CALL = getAsmMethod(Object.class, "lsh", Object.class, int.class); - public final static Method DEF_RSH_CALL = getAsmMethod(Object.class, "rsh", Object.class, int.class); - public final static Method DEF_USH_CALL = getAsmMethod(Object.class, "ush", Object.class, int.class); - public final static Method DEF_AND_CALL = getAsmMethod(Object.class, "and", Object.class, Object.class); - public final static Method DEF_XOR_CALL = getAsmMethod(Object.class, "xor", Object.class, Object.class); - public final static Method DEF_OR_CALL = getAsmMethod(Object.class, "or" , Object.class, Object.class); + public final static Type DEF_UTIL_TYPE = Type.getType(Def.class); + public final static Method DEF_TO_BOOLEAN = getAsmMethod(boolean.class, "DefToboolean" , Object.class); + public final static Method DEF_TO_BYTE_IMPLICIT = getAsmMethod(byte.class , "DefTobyteImplicit" , Object.class); + public final static Method DEF_TO_SHORT_IMPLICIT = getAsmMethod(short.class , "DefToshortImplicit" , Object.class); + public final static Method DEF_TO_CHAR_IMPLICIT = getAsmMethod(char.class , "DefTocharImplicit" , Object.class); + public final static Method DEF_TO_INT_IMPLICIT = getAsmMethod(int.class , "DefTointImplicit" , Object.class); + public final static Method DEF_TO_LONG_IMPLICIT = getAsmMethod(long.class , "DefTolongImplicit" , Object.class); + public final static Method DEF_TO_FLOAT_IMPLICIT = getAsmMethod(float.class , "DefTofloatImplicit" , Object.class); + public final static Method DEF_TO_DOUBLE_IMPLICIT = getAsmMethod(double.class , "DefTodoubleImplicit", 
Object.class); + public final static Method DEF_TO_BYTE_EXPLICIT = getAsmMethod(byte.class , "DefTobyteExplicit" , Object.class); + public final static Method DEF_TO_SHORT_EXPLICIT = getAsmMethod(short.class , "DefToshortExplicit" , Object.class); + public final static Method DEF_TO_CHAR_EXPLICIT = getAsmMethod(char.class , "DefTocharExplicit" , Object.class); + public final static Method DEF_TO_INT_EXPLICIT = getAsmMethod(int.class , "DefTointExplicit" , Object.class); + public final static Method DEF_TO_LONG_EXPLICIT = getAsmMethod(long.class , "DefTolongExplicit" , Object.class); + public final static Method DEF_TO_FLOAT_EXPLICIT = getAsmMethod(float.class , "DefTofloatExplicit" , Object.class); + public final static Method DEF_TO_DOUBLE_EXPLICIT = getAsmMethod(double.class , "DefTodoubleExplicit", Object.class); + public final static Method DEF_NOT_CALL = getAsmMethod(Object.class , "not", Object.class); + public final static Method DEF_NEG_CALL = getAsmMethod(Object.class , "neg", Object.class); + public final static Method DEF_MUL_CALL = getAsmMethod(Object.class , "mul", Object.class, Object.class); + public final static Method DEF_DIV_CALL = getAsmMethod(Object.class , "div", Object.class, Object.class); + public final static Method DEF_REM_CALL = getAsmMethod(Object.class , "rem", Object.class, Object.class); + public final static Method DEF_ADD_CALL = getAsmMethod(Object.class , "add", Object.class, Object.class); + public final static Method DEF_SUB_CALL = getAsmMethod(Object.class , "sub", Object.class, Object.class); + public final static Method DEF_LSH_CALL = getAsmMethod(Object.class , "lsh", Object.class, int.class); + public final static Method DEF_RSH_CALL = getAsmMethod(Object.class , "rsh", Object.class, int.class); + public final static Method DEF_USH_CALL = getAsmMethod(Object.class , "ush", Object.class, int.class); + public final static Method DEF_AND_CALL = getAsmMethod(Object.class , "and", Object.class, Object.class); + public final static Method DEF_XOR_CALL = getAsmMethod(Object.class , "xor", Object.class, Object.class); + public final static Method DEF_OR_CALL = getAsmMethod(Object.class , "or" , Object.class, Object.class); public final static Method DEF_EQ_CALL = getAsmMethod(boolean.class, "eq" , Object.class, Object.class); public final static Method DEF_LT_CALL = getAsmMethod(boolean.class, "lt" , Object.class, Object.class); public final static Method DEF_LTE_CALL = getAsmMethod(boolean.class, "lte", Object.class, Object.class); @@ -99,9 +118,9 @@ public final class WriterConstants { } INDY_STRING_CONCAT_BOOTSTRAP_HANDLE = bs; } - + public final static int MAX_INDY_STRING_CONCAT_ARGS = 200; - + public final static Type STRING_TYPE = Type.getType(String.class); public final static Type STRINGBUILDER_TYPE = Type.getType(StringBuilder.class); @@ -116,59 +135,7 @@ public final class WriterConstants { public final static Method STRINGBUILDER_APPEND_OBJECT = getAsmMethod(StringBuilder.class, "append", Object.class); public final static Method STRINGBUILDER_TOSTRING = getAsmMethod(String.class, "toString"); - public final static Method TOINTEXACT_LONG = getAsmMethod(int.class, "toIntExact", long.class); - public final static Method NEGATEEXACT_INT = getAsmMethod(int.class, "negateExact", int.class); - public final static Method NEGATEEXACT_LONG = getAsmMethod(long.class, "negateExact", long.class); - public final static Method MULEXACT_INT = getAsmMethod(int.class, "multiplyExact", int.class, int.class); - public final static Method MULEXACT_LONG = 
getAsmMethod(long.class, "multiplyExact", long.class, long.class);
-    public final static Method ADDEXACT_INT = getAsmMethod(int.class, "addExact", int.class, int.class);
-    public final static Method ADDEXACT_LONG = getAsmMethod(long.class, "addExact", long.class, long.class);
-    public final static Method SUBEXACT_INT = getAsmMethod(int.class, "subtractExact", int.class, int.class);
-    public final static Method SUBEXACT_LONG = getAsmMethod(long.class, "subtractExact", long.class, long.class);
-
-    public final static Method CHECKEQUALS =
-        getAsmMethod(boolean.class, "checkEquals", Object.class, Object.class);
-    public final static Method TOBYTEEXACT_INT = getAsmMethod(byte.class, "toByteExact", int.class);
-    public final static Method TOBYTEEXACT_LONG = getAsmMethod(byte.class, "toByteExact", long.class);
-    public final static Method TOBYTEWOOVERFLOW_FLOAT = getAsmMethod(byte.class, "toByteWithoutOverflow", float.class);
-    public final static Method TOBYTEWOOVERFLOW_DOUBLE = getAsmMethod(byte.class, "toByteWithoutOverflow", double.class);
-    public final static Method TOSHORTEXACT_INT = getAsmMethod(short.class, "toShortExact", int.class);
-    public final static Method TOSHORTEXACT_LONG = getAsmMethod(short.class, "toShortExact", long.class);
-    public final static Method TOSHORTWOOVERFLOW_FLOAT = getAsmMethod(short.class, "toShortWithoutOverflow", float.class);
-    public final static Method TOSHORTWOOVERFLOW_DOUBLE = getAsmMethod(short.class, "toShortWithoutOverflow", double.class);
-    public final static Method TOCHAREXACT_INT = getAsmMethod(char.class, "toCharExact", int.class);
-    public final static Method TOCHAREXACT_LONG = getAsmMethod(char.class, "toCharExact", long.class);
-    public final static Method TOCHARWOOVERFLOW_FLOAT = getAsmMethod(char.class, "toCharWithoutOverflow", float.class);
-    public final static Method TOCHARWOOVERFLOW_DOUBLE = getAsmMethod(char.class, "toCharWithoutOverflow", double.class);
-    public final static Method TOINTWOOVERFLOW_FLOAT = getAsmMethod(int.class, "toIntWithoutOverflow", float.class);
-    public final static Method TOINTWOOVERFLOW_DOUBLE = getAsmMethod(int.class, "toIntWithoutOverflow", double.class);
-    public final static Method TOLONGWOOVERFLOW_FLOAT = getAsmMethod(long.class, "toLongWithoutOverflow", float.class);
-    public final static Method TOLONGWOOVERFLOW_DOUBLE = getAsmMethod(long.class, "toLongWithoutOverflow", double.class);
-    public final static Method TOFLOATWOOVERFLOW_DOUBLE = getAsmMethod(float.class, "toFloatWithoutOverflow", double.class);
-    public final static Method MULWOOVERLOW_FLOAT =
-        getAsmMethod(float.class, "multiplyWithoutOverflow", float.class, float.class);
-    public final static Method MULWOOVERLOW_DOUBLE =
-        getAsmMethod(double.class, "multiplyWithoutOverflow", double.class, double.class);
-    public final static Method DIVWOOVERLOW_INT =
-        getAsmMethod(int.class, "divideWithoutOverflow", int.class, int.class);
-    public final static Method DIVWOOVERLOW_LONG =
-        getAsmMethod(long.class, "divideWithoutOverflow", long.class, long.class);
-    public final static Method DIVWOOVERLOW_FLOAT =
-        getAsmMethod(float.class, "divideWithoutOverflow", float.class, float.class);
-    public final static Method DIVWOOVERLOW_DOUBLE =
-        getAsmMethod(double.class, "divideWithoutOverflow", double.class, double.class);
-    public final static Method REMWOOVERLOW_FLOAT =
-        getAsmMethod(float.class, "remainderWithoutOverflow", float.class, float.class);
-    public final static Method REMWOOVERLOW_DOUBLE =
-        getAsmMethod(double.class,
"remainderWithoutOverflow", double.class, double.class); - public final static Method ADDWOOVERLOW_FLOAT = - getAsmMethod(float.class, "addWithoutOverflow", float.class, float.class); - public final static Method ADDWOOVERLOW_DOUBLE = - getAsmMethod(double.class, "addWithoutOverflow", double.class, double.class); - public final static Method SUBWOOVERLOW_FLOAT = - getAsmMethod(float.class, "subtractWithoutOverflow", float.class, float.class); - public final static Method SUBWOOVERLOW_DOUBLE = - getAsmMethod(double.class, "subtractWithoutOverflow", double.class, double.class); + public final static Method CHECKEQUALS = getAsmMethod(boolean.class, "checkEquals", Object.class, Object.class); private static Method getAsmMethod(final Class rtype, final String name, final Class... ptypes) { return new Method(name, MethodType.methodType(rtype, ptypes).toMethodDescriptorString()); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java index 8cb9a605868..b85217f9490 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java @@ -1,5 +1,8 @@ // ANTLR GENERATED CODE: DO NOT EDIT package org.elasticsearch.painless.antlr; + +import org.elasticsearch.painless.Definition; + import org.antlr.v4.runtime.Lexer; import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.Token; @@ -25,10 +28,10 @@ class PainlessLexer extends Lexer { BWOR=44, BOOLAND=45, BOOLOR=46, COND=47, COLON=48, INCR=49, DECR=50, ASSIGN=51, AADD=52, ASUB=53, AMUL=54, ADIV=55, AREM=56, AAND=57, AXOR=58, AOR=59, ALSH=60, ARSH=61, AUSH=62, OCTAL=63, HEX=64, INTEGER=65, DECIMAL=66, STRING=67, - TRUE=68, FALSE=69, NULL=70, ID=71, EXTINTEGER=72, EXTID=73; - public static final int EXT = 1; + TRUE=68, FALSE=69, NULL=70, TYPE=71, ID=72, DOTINTEGER=73, DOTID=74; + public static final int AFTER_DOT = 1; public static String[] modeNames = { - "DEFAULT_MODE", "EXT" + "DEFAULT_MODE", "AFTER_DOT" }; public static final String[] ruleNames = { @@ -39,8 +42,8 @@ class PainlessLexer extends Lexer { "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "XOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL", "HEX", - "INTEGER", "DECIMAL", "STRING", "TRUE", "FALSE", "NULL", "ID", "EXTINTEGER", - "EXTID" + "INTEGER", "DECIMAL", "STRING", "TRUE", "FALSE", "NULL", "TYPE", "ID", + "DOTINTEGER", "DOTID" }; private static final String[] _LITERAL_NAMES = { @@ -61,8 +64,8 @@ class PainlessLexer extends Lexer { "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "XOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL", "HEX", - "INTEGER", "DECIMAL", "STRING", "TRUE", "FALSE", "NULL", "ID", "EXTINTEGER", - "EXTID" + "INTEGER", "DECIMAL", "STRING", "TRUE", "FALSE", "NULL", "TYPE", "ID", + "DOTINTEGER", "DOTID" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -118,8 +121,24 @@ class PainlessLexer extends Lexer { @Override public ATN getATN() { return _ATN; } + @Override + public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { + switch (ruleIndex) { + case 70: + return TYPE_sempred((RuleContext)_localctx, 
predIndex); + } + return true; + } + private boolean TYPE_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 0: + return Definition.isSimpleType(getText()) ; + } + return true; + } + public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2K\u01fb\b\1\b\1\4"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2L\u0209\b\1\b\1\4"+ "\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n"+ "\4\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+ "\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31"+ @@ -128,172 +147,178 @@ class PainlessLexer extends Lexer { "+\4,\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64"+ "\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t;\4<\t<\4=\t"+ "=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4"+ - "I\tI\4J\tJ\3\2\6\2\u0098\n\2\r\2\16\2\u0099\3\2\3\2\3\3\3\3\3\3\3\3\7"+ - "\3\u00a2\n\3\f\3\16\3\u00a5\13\3\3\3\3\3\3\3\3\3\3\3\7\3\u00ac\n\3\f\3"+ - "\16\3\u00af\13\3\3\3\3\3\5\3\u00b3\n\3\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6"+ - "\3\7\3\7\3\b\3\b\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3\r\3\r"+ - "\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\20"+ - "\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\23"+ - "\3\23\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\25\3\25"+ - "\3\25\3\25\3\26\3\26\3\26\3\26\3\27\3\27\3\27\3\27\3\27\3\27\3\30\3\30"+ - "\3\30\3\30\3\30\3\30\3\31\3\31\3\32\3\32\3\33\3\33\3\34\3\34\3\35\3\35"+ - "\3\36\3\36\3\37\3\37\3 \3 \3 \3!\3!\3!\3\"\3\"\3\"\3\"\3#\3#\3$\3$\3$"+ - "\3%\3%\3&\3&\3&\3\'\3\'\3\'\3(\3(\3(\3(\3)\3)\3)\3*\3*\3*\3*\3+\3+\3,"+ - "\3,\3-\3-\3.\3.\3.\3/\3/\3/\3\60\3\60\3\61\3\61\3\62\3\62\3\62\3\63\3"+ - "\63\3\63\3\64\3\64\3\65\3\65\3\65\3\66\3\66\3\66\3\67\3\67\3\67\38\38"+ - "\38\39\39\39\3:\3:\3:\3;\3;\3;\3<\3<\3<\3=\3=\3=\3=\3>\3>\3>\3>\3?\3?"+ - "\3?\3?\3?\3@\3@\6@\u0179\n@\r@\16@\u017a\3@\5@\u017e\n@\3A\3A\3A\6A\u0183"+ - "\nA\rA\16A\u0184\3A\5A\u0188\nA\3B\3B\3B\7B\u018d\nB\fB\16B\u0190\13B"+ - "\5B\u0192\nB\3B\5B\u0195\nB\3C\3C\3C\7C\u019a\nC\fC\16C\u019d\13C\5C\u019f"+ - "\nC\3C\3C\7C\u01a3\nC\fC\16C\u01a6\13C\3C\3C\5C\u01aa\nC\3C\6C\u01ad\n"+ - "C\rC\16C\u01ae\5C\u01b1\nC\3C\5C\u01b4\nC\3D\3D\3D\3D\3D\3D\7D\u01bc\n"+ - "D\fD\16D\u01bf\13D\3D\3D\3D\3D\3D\3D\3D\7D\u01c8\nD\fD\16D\u01cb\13D\3"+ - "D\5D\u01ce\nD\3E\3E\3E\3E\3E\3F\3F\3F\3F\3F\3F\3G\3G\3G\3G\3G\3H\3H\7"+ - "H\u01e2\nH\fH\16H\u01e5\13H\3I\3I\3I\7I\u01ea\nI\fI\16I\u01ed\13I\5I\u01ef"+ - "\nI\3I\3I\3J\3J\7J\u01f5\nJ\fJ\16J\u01f8\13J\3J\3J\6\u00a3\u00ad\u01bd"+ - "\u01c9\2K\4\3\6\4\b\5\n\6\f\7\16\b\20\t\22\n\24\13\26\f\30\r\32\16\34"+ - "\17\36\20 \21\"\22$\23&\24(\25*\26,\27.\30\60\31\62\32\64\33\66\348\35"+ - ":\36<\37> @!B\"D#F$H%J&L\'N(P)R*T+V,X-Z.\\/^\60`\61b\62d\63f\64h\65j\66"+ - "l\67n8p9r:t;v|?~@\u0080A\u0082B\u0084C\u0086D\u0088E\u008aF\u008c"+ - "G\u008eH\u0090I\u0092J\u0094K\4\2\3\21\5\2\13\f\17\17\"\"\4\2\f\f\17\17"+ - "\3\2\629\4\2NNnn\4\2ZZzz\5\2\62;CHch\3\2\63;\3\2\62;\b\2FFHHNNffhhnn\4"+ - "\2GGgg\4\2--//\4\2HHhh\4\2$$^^\5\2C\\aac|\6\2\62;C\\aac|\u0216\2\4\3\2"+ - "\2\2\2\6\3\2\2\2\2\b\3\2\2\2\2\n\3\2\2\2\2\f\3\2\2\2\2\16\3\2\2\2\2\20"+ - "\3\2\2\2\2\22\3\2\2\2\2\24\3\2\2\2\2\26\3\2\2\2\2\30\3\2\2\2\2\32\3\2"+ - "\2\2\2\34\3\2\2\2\2\36\3\2\2\2\2 \3\2\2\2\2\"\3\2\2\2\2$\3\2\2\2\2&\3"+ - "\2\2\2\2(\3\2\2\2\2*\3\2\2\2\2,\3\2\2\2\2.\3\2\2\2\2\60\3\2\2\2\2\62\3"+ - 
"\2\2\2\2\64\3\2\2\2\2\66\3\2\2\2\28\3\2\2\2\2:\3\2\2\2\2<\3\2\2\2\2>\3"+ - "\2\2\2\2@\3\2\2\2\2B\3\2\2\2\2D\3\2\2\2\2F\3\2\2\2\2H\3\2\2\2\2J\3\2\2"+ - "\2\2L\3\2\2\2\2N\3\2\2\2\2P\3\2\2\2\2R\3\2\2\2\2T\3\2\2\2\2V\3\2\2\2\2"+ - "X\3\2\2\2\2Z\3\2\2\2\2\\\3\2\2\2\2^\3\2\2\2\2`\3\2\2\2\2b\3\2\2\2\2d\3"+ - "\2\2\2\2f\3\2\2\2\2h\3\2\2\2\2j\3\2\2\2\2l\3\2\2\2\2n\3\2\2\2\2p\3\2\2"+ - "\2\2r\3\2\2\2\2t\3\2\2\2\2v\3\2\2\2\2x\3\2\2\2\2z\3\2\2\2\2|\3\2\2\2\2"+ - "~\3\2\2\2\2\u0080\3\2\2\2\2\u0082\3\2\2\2\2\u0084\3\2\2\2\2\u0086\3\2"+ - "\2\2\2\u0088\3\2\2\2\2\u008a\3\2\2\2\2\u008c\3\2\2\2\2\u008e\3\2\2\2\2"+ - "\u0090\3\2\2\2\3\u0092\3\2\2\2\3\u0094\3\2\2\2\4\u0097\3\2\2\2\6\u00b2"+ - "\3\2\2\2\b\u00b6\3\2\2\2\n\u00b8\3\2\2\2\f\u00ba\3\2\2\2\16\u00bc\3\2"+ - "\2\2\20\u00be\3\2\2\2\22\u00c0\3\2\2\2\24\u00c2\3\2\2\2\26\u00c6\3\2\2"+ - "\2\30\u00c8\3\2\2\2\32\u00ca\3\2\2\2\34\u00cd\3\2\2\2\36\u00d2\3\2\2\2"+ - " \u00d8\3\2\2\2\"\u00db\3\2\2\2$\u00df\3\2\2\2&\u00e8\3\2\2\2(\u00ee\3"+ - "\2\2\2*\u00f5\3\2\2\2,\u00f9\3\2\2\2.\u00fd\3\2\2\2\60\u0103\3\2\2\2\62"+ - "\u0109\3\2\2\2\64\u010b\3\2\2\2\66\u010d\3\2\2\28\u010f\3\2\2\2:\u0111"+ - "\3\2\2\2<\u0113\3\2\2\2>\u0115\3\2\2\2@\u0117\3\2\2\2B\u011a\3\2\2\2D"+ - "\u011d\3\2\2\2F\u0121\3\2\2\2H\u0123\3\2\2\2J\u0126\3\2\2\2L\u0128\3\2"+ - "\2\2N\u012b\3\2\2\2P\u012e\3\2\2\2R\u0132\3\2\2\2T\u0135\3\2\2\2V\u0139"+ - "\3\2\2\2X\u013b\3\2\2\2Z\u013d\3\2\2\2\\\u013f\3\2\2\2^\u0142\3\2\2\2"+ - "`\u0145\3\2\2\2b\u0147\3\2\2\2d\u0149\3\2\2\2f\u014c\3\2\2\2h\u014f\3"+ - "\2\2\2j\u0151\3\2\2\2l\u0154\3\2\2\2n\u0157\3\2\2\2p\u015a\3\2\2\2r\u015d"+ - "\3\2\2\2t\u0160\3\2\2\2v\u0163\3\2\2\2x\u0166\3\2\2\2z\u0169\3\2\2\2|"+ - "\u016d\3\2\2\2~\u0171\3\2\2\2\u0080\u0176\3\2\2\2\u0082\u017f\3\2\2\2"+ - "\u0084\u0191\3\2\2\2\u0086\u019e\3\2\2\2\u0088\u01cd\3\2\2\2\u008a\u01cf"+ - "\3\2\2\2\u008c\u01d4\3\2\2\2\u008e\u01da\3\2\2\2\u0090\u01df\3\2\2\2\u0092"+ - "\u01ee\3\2\2\2\u0094\u01f2\3\2\2\2\u0096\u0098\t\2\2\2\u0097\u0096\3\2"+ - "\2\2\u0098\u0099\3\2\2\2\u0099\u0097\3\2\2\2\u0099\u009a\3\2\2\2\u009a"+ - "\u009b\3\2\2\2\u009b\u009c\b\2\2\2\u009c\5\3\2\2\2\u009d\u009e\7\61\2"+ - "\2\u009e\u009f\7\61\2\2\u009f\u00a3\3\2\2\2\u00a0\u00a2\13\2\2\2\u00a1"+ - "\u00a0\3\2\2\2\u00a2\u00a5\3\2\2\2\u00a3\u00a4\3\2\2\2\u00a3\u00a1\3\2"+ - "\2\2\u00a4\u00a6\3\2\2\2\u00a5\u00a3\3\2\2\2\u00a6\u00b3\t\3\2\2\u00a7"+ - "\u00a8\7\61\2\2\u00a8\u00a9\7,\2\2\u00a9\u00ad\3\2\2\2\u00aa\u00ac\13"+ - "\2\2\2\u00ab\u00aa\3\2\2\2\u00ac\u00af\3\2\2\2\u00ad\u00ae\3\2\2\2\u00ad"+ - "\u00ab\3\2\2\2\u00ae\u00b0\3\2\2\2\u00af\u00ad\3\2\2\2\u00b0\u00b1\7,"+ - "\2\2\u00b1\u00b3\7\61\2\2\u00b2\u009d\3\2\2\2\u00b2\u00a7\3\2\2\2\u00b3"+ - "\u00b4\3\2\2\2\u00b4\u00b5\b\3\2\2\u00b5\7\3\2\2\2\u00b6\u00b7\7}\2\2"+ - "\u00b7\t\3\2\2\2\u00b8\u00b9\7\177\2\2\u00b9\13\3\2\2\2\u00ba\u00bb\7"+ - "]\2\2\u00bb\r\3\2\2\2\u00bc\u00bd\7_\2\2\u00bd\17\3\2\2\2\u00be\u00bf"+ - "\7*\2\2\u00bf\21\3\2\2\2\u00c0\u00c1\7+\2\2\u00c1\23\3\2\2\2\u00c2\u00c3"+ - "\7\60\2\2\u00c3\u00c4\3\2\2\2\u00c4\u00c5\b\n\3\2\u00c5\25\3\2\2\2\u00c6"+ - "\u00c7\7.\2\2\u00c7\27\3\2\2\2\u00c8\u00c9\7=\2\2\u00c9\31\3\2\2\2\u00ca"+ - "\u00cb\7k\2\2\u00cb\u00cc\7h\2\2\u00cc\33\3\2\2\2\u00cd\u00ce\7g\2\2\u00ce"+ - "\u00cf\7n\2\2\u00cf\u00d0\7u\2\2\u00d0\u00d1\7g\2\2\u00d1\35\3\2\2\2\u00d2"+ - "\u00d3\7y\2\2\u00d3\u00d4\7j\2\2\u00d4\u00d5\7k\2\2\u00d5\u00d6\7n\2\2"+ - "\u00d6\u00d7\7g\2\2\u00d7\37\3\2\2\2\u00d8\u00d9\7f\2\2\u00d9\u00da\7"+ - "q\2\2\u00da!\3\2\2\2\u00db\u00dc\7h\2\2\u00dc\u00dd\7q\2\2\u00dd\u00de"+ - 
"\7t\2\2\u00de#\3\2\2\2\u00df\u00e0\7e\2\2\u00e0\u00e1\7q\2\2\u00e1\u00e2"+ - "\7p\2\2\u00e2\u00e3\7v\2\2\u00e3\u00e4\7k\2\2\u00e4\u00e5\7p\2\2\u00e5"+ - "\u00e6\7w\2\2\u00e6\u00e7\7g\2\2\u00e7%\3\2\2\2\u00e8\u00e9\7d\2\2\u00e9"+ - "\u00ea\7t\2\2\u00ea\u00eb\7g\2\2\u00eb\u00ec\7c\2\2\u00ec\u00ed\7m\2\2"+ - "\u00ed\'\3\2\2\2\u00ee\u00ef\7t\2\2\u00ef\u00f0\7g\2\2\u00f0\u00f1\7v"+ - "\2\2\u00f1\u00f2\7w\2\2\u00f2\u00f3\7t\2\2\u00f3\u00f4\7p\2\2\u00f4)\3"+ - "\2\2\2\u00f5\u00f6\7p\2\2\u00f6\u00f7\7g\2\2\u00f7\u00f8\7y\2\2\u00f8"+ - "+\3\2\2\2\u00f9\u00fa\7v\2\2\u00fa\u00fb\7t\2\2\u00fb\u00fc\7{\2\2\u00fc"+ - "-\3\2\2\2\u00fd\u00fe\7e\2\2\u00fe\u00ff\7c\2\2\u00ff\u0100\7v\2\2\u0100"+ - "\u0101\7e\2\2\u0101\u0102\7j\2\2\u0102/\3\2\2\2\u0103\u0104\7v\2\2\u0104"+ - "\u0105\7j\2\2\u0105\u0106\7t\2\2\u0106\u0107\7q\2\2\u0107\u0108\7y\2\2"+ - "\u0108\61\3\2\2\2\u0109\u010a\7#\2\2\u010a\63\3\2\2\2\u010b\u010c\7\u0080"+ - "\2\2\u010c\65\3\2\2\2\u010d\u010e\7,\2\2\u010e\67\3\2\2\2\u010f\u0110"+ - "\7\61\2\2\u01109\3\2\2\2\u0111\u0112\7\'\2\2\u0112;\3\2\2\2\u0113\u0114"+ - "\7-\2\2\u0114=\3\2\2\2\u0115\u0116\7/\2\2\u0116?\3\2\2\2\u0117\u0118\7"+ - ">\2\2\u0118\u0119\7>\2\2\u0119A\3\2\2\2\u011a\u011b\7@\2\2\u011b\u011c"+ - "\7@\2\2\u011cC\3\2\2\2\u011d\u011e\7@\2\2\u011e\u011f\7@\2\2\u011f\u0120"+ - "\7@\2\2\u0120E\3\2\2\2\u0121\u0122\7>\2\2\u0122G\3\2\2\2\u0123\u0124\7"+ - ">\2\2\u0124\u0125\7?\2\2\u0125I\3\2\2\2\u0126\u0127\7@\2\2\u0127K\3\2"+ - "\2\2\u0128\u0129\7@\2\2\u0129\u012a\7?\2\2\u012aM\3\2\2\2\u012b\u012c"+ - "\7?\2\2\u012c\u012d\7?\2\2\u012dO\3\2\2\2\u012e\u012f\7?\2\2\u012f\u0130"+ - "\7?\2\2\u0130\u0131\7?\2\2\u0131Q\3\2\2\2\u0132\u0133\7#\2\2\u0133\u0134"+ - "\7?\2\2\u0134S\3\2\2\2\u0135\u0136\7#\2\2\u0136\u0137\7?\2\2\u0137\u0138"+ - "\7?\2\2\u0138U\3\2\2\2\u0139\u013a\7(\2\2\u013aW\3\2\2\2\u013b\u013c\7"+ - "`\2\2\u013cY\3\2\2\2\u013d\u013e\7~\2\2\u013e[\3\2\2\2\u013f\u0140\7("+ - "\2\2\u0140\u0141\7(\2\2\u0141]\3\2\2\2\u0142\u0143\7~\2\2\u0143\u0144"+ - "\7~\2\2\u0144_\3\2\2\2\u0145\u0146\7A\2\2\u0146a\3\2\2\2\u0147\u0148\7"+ - "<\2\2\u0148c\3\2\2\2\u0149\u014a\7-\2\2\u014a\u014b\7-\2\2\u014be\3\2"+ - "\2\2\u014c\u014d\7/\2\2\u014d\u014e\7/\2\2\u014eg\3\2\2\2\u014f\u0150"+ - "\7?\2\2\u0150i\3\2\2\2\u0151\u0152\7-\2\2\u0152\u0153\7?\2\2\u0153k\3"+ - "\2\2\2\u0154\u0155\7/\2\2\u0155\u0156\7?\2\2\u0156m\3\2\2\2\u0157\u0158"+ - "\7,\2\2\u0158\u0159\7?\2\2\u0159o\3\2\2\2\u015a\u015b\7\61\2\2\u015b\u015c"+ - "\7?\2\2\u015cq\3\2\2\2\u015d\u015e\7\'\2\2\u015e\u015f\7?\2\2\u015fs\3"+ - "\2\2\2\u0160\u0161\7(\2\2\u0161\u0162\7?\2\2\u0162u\3\2\2\2\u0163\u0164"+ - "\7`\2\2\u0164\u0165\7?\2\2\u0165w\3\2\2\2\u0166\u0167\7~\2\2\u0167\u0168"+ - "\7?\2\2\u0168y\3\2\2\2\u0169\u016a\7>\2\2\u016a\u016b\7>\2\2\u016b\u016c"+ - "\7?\2\2\u016c{\3\2\2\2\u016d\u016e\7@\2\2\u016e\u016f\7@\2\2\u016f\u0170"+ - "\7?\2\2\u0170}\3\2\2\2\u0171\u0172\7@\2\2\u0172\u0173\7@\2\2\u0173\u0174"+ - "\7@\2\2\u0174\u0175\7?\2\2\u0175\177\3\2\2\2\u0176\u0178\7\62\2\2\u0177"+ - "\u0179\t\4\2\2\u0178\u0177\3\2\2\2\u0179\u017a\3\2\2\2\u017a\u0178\3\2"+ - "\2\2\u017a\u017b\3\2\2\2\u017b\u017d\3\2\2\2\u017c\u017e\t\5\2\2\u017d"+ - "\u017c\3\2\2\2\u017d\u017e\3\2\2\2\u017e\u0081\3\2\2\2\u017f\u0180\7\62"+ - "\2\2\u0180\u0182\t\6\2\2\u0181\u0183\t\7\2\2\u0182\u0181\3\2\2\2\u0183"+ - "\u0184\3\2\2\2\u0184\u0182\3\2\2\2\u0184\u0185\3\2\2\2\u0185\u0187\3\2"+ - "\2\2\u0186\u0188\t\5\2\2\u0187\u0186\3\2\2\2\u0187\u0188\3\2\2\2\u0188"+ - "\u0083\3\2\2\2\u0189\u0192\7\62\2\2\u018a\u018e\t\b\2\2\u018b\u018d\t"+ - 
"\t\2\2\u018c\u018b\3\2\2\2\u018d\u0190\3\2\2\2\u018e\u018c\3\2\2\2\u018e"+ - "\u018f\3\2\2\2\u018f\u0192\3\2\2\2\u0190\u018e\3\2\2\2\u0191\u0189\3\2"+ - "\2\2\u0191\u018a\3\2\2\2\u0192\u0194\3\2\2\2\u0193\u0195\t\n\2\2\u0194"+ - "\u0193\3\2\2\2\u0194\u0195\3\2\2\2\u0195\u0085\3\2\2\2\u0196\u019f\7\62"+ - "\2\2\u0197\u019b\t\b\2\2\u0198\u019a\t\t\2\2\u0199\u0198\3\2\2\2\u019a"+ - "\u019d\3\2\2\2\u019b\u0199\3\2\2\2\u019b\u019c\3\2\2\2\u019c\u019f\3\2"+ - "\2\2\u019d\u019b\3\2\2\2\u019e\u0196\3\2\2\2\u019e\u0197\3\2\2\2\u019f"+ - "\u01a0\3\2\2\2\u01a0\u01a4\5\24\n\2\u01a1\u01a3\t\t\2\2\u01a2\u01a1\3"+ - "\2\2\2\u01a3\u01a6\3\2\2\2\u01a4\u01a2\3\2\2\2\u01a4\u01a5\3\2\2\2\u01a5"+ - "\u01b0\3\2\2\2\u01a6\u01a4\3\2\2\2\u01a7\u01a9\t\13\2\2\u01a8\u01aa\t"+ - "\f\2\2\u01a9\u01a8\3\2\2\2\u01a9\u01aa\3\2\2\2\u01aa\u01ac\3\2\2\2\u01ab"+ - "\u01ad\t\t\2\2\u01ac\u01ab\3\2\2\2\u01ad\u01ae\3\2\2\2\u01ae\u01ac\3\2"+ - "\2\2\u01ae\u01af\3\2\2\2\u01af\u01b1\3\2\2\2\u01b0\u01a7\3\2\2\2\u01b0"+ - "\u01b1\3\2\2\2\u01b1\u01b3\3\2\2\2\u01b2\u01b4\t\r\2\2\u01b3\u01b2\3\2"+ - "\2\2\u01b3\u01b4\3\2\2\2\u01b4\u0087\3\2\2\2\u01b5\u01bd\7$\2\2\u01b6"+ - "\u01b7\7^\2\2\u01b7\u01bc\7$\2\2\u01b8\u01b9\7^\2\2\u01b9\u01bc\7^\2\2"+ - "\u01ba\u01bc\n\16\2\2\u01bb\u01b6\3\2\2\2\u01bb\u01b8\3\2\2\2\u01bb\u01ba"+ - "\3\2\2\2\u01bc\u01bf\3\2\2\2\u01bd\u01be\3\2\2\2\u01bd\u01bb\3\2\2\2\u01be"+ - "\u01c0\3\2\2\2\u01bf\u01bd\3\2\2\2\u01c0\u01ce\7$\2\2\u01c1\u01c9\7)\2"+ - "\2\u01c2\u01c3\7^\2\2\u01c3\u01c8\7)\2\2\u01c4\u01c5\7^\2\2\u01c5\u01c8"+ - "\7^\2\2\u01c6\u01c8\n\16\2\2\u01c7\u01c2\3\2\2\2\u01c7\u01c4\3\2\2\2\u01c7"+ - "\u01c6\3\2\2\2\u01c8\u01cb\3\2\2\2\u01c9\u01ca\3\2\2\2\u01c9\u01c7\3\2"+ - "\2\2\u01ca\u01cc\3\2\2\2\u01cb\u01c9\3\2\2\2\u01cc\u01ce\7)\2\2\u01cd"+ - "\u01b5\3\2\2\2\u01cd\u01c1\3\2\2\2\u01ce\u0089\3\2\2\2\u01cf\u01d0\7v"+ - "\2\2\u01d0\u01d1\7t\2\2\u01d1\u01d2\7w\2\2\u01d2\u01d3\7g\2\2\u01d3\u008b"+ - "\3\2\2\2\u01d4\u01d5\7h\2\2\u01d5\u01d6\7c\2\2\u01d6\u01d7\7n\2\2\u01d7"+ - "\u01d8\7u\2\2\u01d8\u01d9\7g\2\2\u01d9\u008d\3\2\2\2\u01da\u01db\7p\2"+ - "\2\u01db\u01dc\7w\2\2\u01dc\u01dd\7n\2\2\u01dd\u01de\7n\2\2\u01de\u008f"+ - "\3\2\2\2\u01df\u01e3\t\17\2\2\u01e0\u01e2\t\20\2\2\u01e1\u01e0\3\2\2\2"+ - "\u01e2\u01e5\3\2\2\2\u01e3\u01e1\3\2\2\2\u01e3\u01e4\3\2\2\2\u01e4\u0091"+ - "\3\2\2\2\u01e5\u01e3\3\2\2\2\u01e6\u01ef\7\62\2\2\u01e7\u01eb\t\b\2\2"+ - "\u01e8\u01ea\t\t\2\2\u01e9\u01e8\3\2\2\2\u01ea\u01ed\3\2\2\2\u01eb\u01e9"+ - "\3\2\2\2\u01eb\u01ec\3\2\2\2\u01ec\u01ef\3\2\2\2\u01ed\u01eb\3\2\2\2\u01ee"+ - "\u01e6\3\2\2\2\u01ee\u01e7\3\2\2\2\u01ef\u01f0\3\2\2\2\u01f0\u01f1\bI"+ - "\4\2\u01f1\u0093\3\2\2\2\u01f2\u01f6\t\17\2\2\u01f3\u01f5\t\20\2\2\u01f4"+ - "\u01f3\3\2\2\2\u01f5\u01f8\3\2\2\2\u01f6\u01f4\3\2\2\2\u01f6\u01f7\3\2"+ - "\2\2\u01f7\u01f9\3\2\2\2\u01f8\u01f6\3\2\2\2\u01f9\u01fa\bJ\4\2\u01fa"+ - "\u0095\3\2\2\2\37\2\3\u0099\u00a3\u00ad\u00b2\u017a\u017d\u0184\u0187"+ - "\u018e\u0191\u0194\u019b\u019e\u01a4\u01a9\u01ae\u01b0\u01b3\u01bb\u01bd"+ - "\u01c7\u01c9\u01cd\u01e3\u01eb\u01ee\u01f6\5\b\2\2\4\3\2\4\2\2"; + "I\tI\4J\tJ\4K\tK\3\2\6\2\u009a\n\2\r\2\16\2\u009b\3\2\3\2\3\3\3\3\3\3"+ + "\3\3\7\3\u00a4\n\3\f\3\16\3\u00a7\13\3\3\3\3\3\3\3\3\3\3\3\7\3\u00ae\n"+ + "\3\f\3\16\3\u00b1\13\3\3\3\3\3\5\3\u00b5\n\3\3\3\3\3\3\4\3\4\3\5\3\5\3"+ + "\6\3\6\3\7\3\7\3\b\3\b\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3"+ + "\r\3\r\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20"+ + "\3\20\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22"+ + 
"\3\23\3\23\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\25"+ + "\3\25\3\25\3\25\3\26\3\26\3\26\3\26\3\27\3\27\3\27\3\27\3\27\3\27\3\30"+ + "\3\30\3\30\3\30\3\30\3\30\3\31\3\31\3\32\3\32\3\33\3\33\3\34\3\34\3\35"+ + "\3\35\3\36\3\36\3\37\3\37\3 \3 \3 \3!\3!\3!\3\"\3\"\3\"\3\"\3#\3#\3$\3"+ + "$\3$\3%\3%\3&\3&\3&\3\'\3\'\3\'\3(\3(\3(\3(\3)\3)\3)\3*\3*\3*\3*\3+\3"+ + "+\3,\3,\3-\3-\3.\3.\3.\3/\3/\3/\3\60\3\60\3\61\3\61\3\62\3\62\3\62\3\63"+ + "\3\63\3\63\3\64\3\64\3\65\3\65\3\65\3\66\3\66\3\66\3\67\3\67\3\67\38\3"+ + "8\38\39\39\39\3:\3:\3:\3;\3;\3;\3<\3<\3<\3=\3=\3=\3=\3>\3>\3>\3>\3?\3"+ + "?\3?\3?\3?\3@\3@\6@\u017b\n@\r@\16@\u017c\3@\5@\u0180\n@\3A\3A\3A\6A\u0185"+ + "\nA\rA\16A\u0186\3A\5A\u018a\nA\3B\3B\3B\7B\u018f\nB\fB\16B\u0192\13B"+ + "\5B\u0194\nB\3B\5B\u0197\nB\3C\3C\3C\7C\u019c\nC\fC\16C\u019f\13C\5C\u01a1"+ + "\nC\3C\3C\6C\u01a5\nC\rC\16C\u01a6\5C\u01a9\nC\3C\3C\5C\u01ad\nC\3C\6"+ + "C\u01b0\nC\rC\16C\u01b1\5C\u01b4\nC\3C\5C\u01b7\nC\3D\3D\3D\3D\3D\3D\7"+ + "D\u01bf\nD\fD\16D\u01c2\13D\3D\3D\3D\3D\3D\3D\3D\7D\u01cb\nD\fD\16D\u01ce"+ + "\13D\3D\5D\u01d1\nD\3E\3E\3E\3E\3E\3F\3F\3F\3F\3F\3F\3G\3G\3G\3G\3G\3"+ + "H\3H\3H\3H\7H\u01e7\nH\fH\16H\u01ea\13H\3H\3H\3I\3I\7I\u01f0\nI\fI\16"+ + "I\u01f3\13I\3J\3J\3J\7J\u01f8\nJ\fJ\16J\u01fb\13J\5J\u01fd\nJ\3J\3J\3"+ + "K\3K\7K\u0203\nK\fK\16K\u0206\13K\3K\3K\6\u00a5\u00af\u01c0\u01cc\2L\4"+ + "\3\6\4\b\5\n\6\f\7\16\b\20\t\22\n\24\13\26\f\30\r\32\16\34\17\36\20 \21"+ + "\"\22$\23&\24(\25*\26,\27.\30\60\31\62\32\64\33\66\348\35:\36<\37> @!"+ + "B\"D#F$H%J&L\'N(P)R*T+V,X-Z.\\/^\60`\61b\62d\63f\64h\65j\66l\67n8p9r:"+ + "t;v|?~@\u0080A\u0082B\u0084C\u0086D\u0088E\u008aF\u008cG\u008eH\u0090"+ + "I\u0092J\u0094K\u0096L\4\2\3\22\5\2\13\f\17\17\"\"\4\2\f\f\17\17\3\2\62"+ + "9\4\2NNnn\4\2ZZzz\5\2\62;CHch\3\2\63;\3\2\62;\b\2FFHHNNffhhnn\4\2GGgg"+ + "\4\2--//\4\2HHhh\4\2$$^^\5\2C\\aac|\6\2\62;C\\aac|\4\2aac|\u0226\2\4\3"+ + "\2\2\2\2\6\3\2\2\2\2\b\3\2\2\2\2\n\3\2\2\2\2\f\3\2\2\2\2\16\3\2\2\2\2"+ + "\20\3\2\2\2\2\22\3\2\2\2\2\24\3\2\2\2\2\26\3\2\2\2\2\30\3\2\2\2\2\32\3"+ + "\2\2\2\2\34\3\2\2\2\2\36\3\2\2\2\2 \3\2\2\2\2\"\3\2\2\2\2$\3\2\2\2\2&"+ + "\3\2\2\2\2(\3\2\2\2\2*\3\2\2\2\2,\3\2\2\2\2.\3\2\2\2\2\60\3\2\2\2\2\62"+ + "\3\2\2\2\2\64\3\2\2\2\2\66\3\2\2\2\28\3\2\2\2\2:\3\2\2\2\2<\3\2\2\2\2"+ + ">\3\2\2\2\2@\3\2\2\2\2B\3\2\2\2\2D\3\2\2\2\2F\3\2\2\2\2H\3\2\2\2\2J\3"+ + "\2\2\2\2L\3\2\2\2\2N\3\2\2\2\2P\3\2\2\2\2R\3\2\2\2\2T\3\2\2\2\2V\3\2\2"+ + "\2\2X\3\2\2\2\2Z\3\2\2\2\2\\\3\2\2\2\2^\3\2\2\2\2`\3\2\2\2\2b\3\2\2\2"+ + "\2d\3\2\2\2\2f\3\2\2\2\2h\3\2\2\2\2j\3\2\2\2\2l\3\2\2\2\2n\3\2\2\2\2p"+ + "\3\2\2\2\2r\3\2\2\2\2t\3\2\2\2\2v\3\2\2\2\2x\3\2\2\2\2z\3\2\2\2\2|\3\2"+ + "\2\2\2~\3\2\2\2\2\u0080\3\2\2\2\2\u0082\3\2\2\2\2\u0084\3\2\2\2\2\u0086"+ + "\3\2\2\2\2\u0088\3\2\2\2\2\u008a\3\2\2\2\2\u008c\3\2\2\2\2\u008e\3\2\2"+ + "\2\2\u0090\3\2\2\2\2\u0092\3\2\2\2\3\u0094\3\2\2\2\3\u0096\3\2\2\2\4\u0099"+ + "\3\2\2\2\6\u00b4\3\2\2\2\b\u00b8\3\2\2\2\n\u00ba\3\2\2\2\f\u00bc\3\2\2"+ + "\2\16\u00be\3\2\2\2\20\u00c0\3\2\2\2\22\u00c2\3\2\2\2\24\u00c4\3\2\2\2"+ + "\26\u00c8\3\2\2\2\30\u00ca\3\2\2\2\32\u00cc\3\2\2\2\34\u00cf\3\2\2\2\36"+ + "\u00d4\3\2\2\2 \u00da\3\2\2\2\"\u00dd\3\2\2\2$\u00e1\3\2\2\2&\u00ea\3"+ + "\2\2\2(\u00f0\3\2\2\2*\u00f7\3\2\2\2,\u00fb\3\2\2\2.\u00ff\3\2\2\2\60"+ + "\u0105\3\2\2\2\62\u010b\3\2\2\2\64\u010d\3\2\2\2\66\u010f\3\2\2\28\u0111"+ + "\3\2\2\2:\u0113\3\2\2\2<\u0115\3\2\2\2>\u0117\3\2\2\2@\u0119\3\2\2\2B"+ + "\u011c\3\2\2\2D\u011f\3\2\2\2F\u0123\3\2\2\2H\u0125\3\2\2\2J\u0128\3\2"+ + 
"\2\2L\u012a\3\2\2\2N\u012d\3\2\2\2P\u0130\3\2\2\2R\u0134\3\2\2\2T\u0137"+ + "\3\2\2\2V\u013b\3\2\2\2X\u013d\3\2\2\2Z\u013f\3\2\2\2\\\u0141\3\2\2\2"+ + "^\u0144\3\2\2\2`\u0147\3\2\2\2b\u0149\3\2\2\2d\u014b\3\2\2\2f\u014e\3"+ + "\2\2\2h\u0151\3\2\2\2j\u0153\3\2\2\2l\u0156\3\2\2\2n\u0159\3\2\2\2p\u015c"+ + "\3\2\2\2r\u015f\3\2\2\2t\u0162\3\2\2\2v\u0165\3\2\2\2x\u0168\3\2\2\2z"+ + "\u016b\3\2\2\2|\u016f\3\2\2\2~\u0173\3\2\2\2\u0080\u0178\3\2\2\2\u0082"+ + "\u0181\3\2\2\2\u0084\u0193\3\2\2\2\u0086\u01a0\3\2\2\2\u0088\u01d0\3\2"+ + "\2\2\u008a\u01d2\3\2\2\2\u008c\u01d7\3\2\2\2\u008e\u01dd\3\2\2\2\u0090"+ + "\u01e2\3\2\2\2\u0092\u01ed\3\2\2\2\u0094\u01fc\3\2\2\2\u0096\u0200\3\2"+ + "\2\2\u0098\u009a\t\2\2\2\u0099\u0098\3\2\2\2\u009a\u009b\3\2\2\2\u009b"+ + "\u0099\3\2\2\2\u009b\u009c\3\2\2\2\u009c\u009d\3\2\2\2\u009d\u009e\b\2"+ + "\2\2\u009e\5\3\2\2\2\u009f\u00a0\7\61\2\2\u00a0\u00a1\7\61\2\2\u00a1\u00a5"+ + "\3\2\2\2\u00a2\u00a4\13\2\2\2\u00a3\u00a2\3\2\2\2\u00a4\u00a7\3\2\2\2"+ + "\u00a5\u00a6\3\2\2\2\u00a5\u00a3\3\2\2\2\u00a6\u00a8\3\2\2\2\u00a7\u00a5"+ + "\3\2\2\2\u00a8\u00b5\t\3\2\2\u00a9\u00aa\7\61\2\2\u00aa\u00ab\7,\2\2\u00ab"+ + "\u00af\3\2\2\2\u00ac\u00ae\13\2\2\2\u00ad\u00ac\3\2\2\2\u00ae\u00b1\3"+ + "\2\2\2\u00af\u00b0\3\2\2\2\u00af\u00ad\3\2\2\2\u00b0\u00b2\3\2\2\2\u00b1"+ + "\u00af\3\2\2\2\u00b2\u00b3\7,\2\2\u00b3\u00b5\7\61\2\2\u00b4\u009f\3\2"+ + "\2\2\u00b4\u00a9\3\2\2\2\u00b5\u00b6\3\2\2\2\u00b6\u00b7\b\3\2\2\u00b7"+ + "\7\3\2\2\2\u00b8\u00b9\7}\2\2\u00b9\t\3\2\2\2\u00ba\u00bb\7\177\2\2\u00bb"+ + "\13\3\2\2\2\u00bc\u00bd\7]\2\2\u00bd\r\3\2\2\2\u00be\u00bf\7_\2\2\u00bf"+ + "\17\3\2\2\2\u00c0\u00c1\7*\2\2\u00c1\21\3\2\2\2\u00c2\u00c3\7+\2\2\u00c3"+ + "\23\3\2\2\2\u00c4\u00c5\7\60\2\2\u00c5\u00c6\3\2\2\2\u00c6\u00c7\b\n\3"+ + "\2\u00c7\25\3\2\2\2\u00c8\u00c9\7.\2\2\u00c9\27\3\2\2\2\u00ca\u00cb\7"+ + "=\2\2\u00cb\31\3\2\2\2\u00cc\u00cd\7k\2\2\u00cd\u00ce\7h\2\2\u00ce\33"+ + "\3\2\2\2\u00cf\u00d0\7g\2\2\u00d0\u00d1\7n\2\2\u00d1\u00d2\7u\2\2\u00d2"+ + "\u00d3\7g\2\2\u00d3\35\3\2\2\2\u00d4\u00d5\7y\2\2\u00d5\u00d6\7j\2\2\u00d6"+ + "\u00d7\7k\2\2\u00d7\u00d8\7n\2\2\u00d8\u00d9\7g\2\2\u00d9\37\3\2\2\2\u00da"+ + "\u00db\7f\2\2\u00db\u00dc\7q\2\2\u00dc!\3\2\2\2\u00dd\u00de\7h\2\2\u00de"+ + "\u00df\7q\2\2\u00df\u00e0\7t\2\2\u00e0#\3\2\2\2\u00e1\u00e2\7e\2\2\u00e2"+ + "\u00e3\7q\2\2\u00e3\u00e4\7p\2\2\u00e4\u00e5\7v\2\2\u00e5\u00e6\7k\2\2"+ + "\u00e6\u00e7\7p\2\2\u00e7\u00e8\7w\2\2\u00e8\u00e9\7g\2\2\u00e9%\3\2\2"+ + "\2\u00ea\u00eb\7d\2\2\u00eb\u00ec\7t\2\2\u00ec\u00ed\7g\2\2\u00ed\u00ee"+ + "\7c\2\2\u00ee\u00ef\7m\2\2\u00ef\'\3\2\2\2\u00f0\u00f1\7t\2\2\u00f1\u00f2"+ + "\7g\2\2\u00f2\u00f3\7v\2\2\u00f3\u00f4\7w\2\2\u00f4\u00f5\7t\2\2\u00f5"+ + "\u00f6\7p\2\2\u00f6)\3\2\2\2\u00f7\u00f8\7p\2\2\u00f8\u00f9\7g\2\2\u00f9"+ + "\u00fa\7y\2\2\u00fa+\3\2\2\2\u00fb\u00fc\7v\2\2\u00fc\u00fd\7t\2\2\u00fd"+ + "\u00fe\7{\2\2\u00fe-\3\2\2\2\u00ff\u0100\7e\2\2\u0100\u0101\7c\2\2\u0101"+ + "\u0102\7v\2\2\u0102\u0103\7e\2\2\u0103\u0104\7j\2\2\u0104/\3\2\2\2\u0105"+ + "\u0106\7v\2\2\u0106\u0107\7j\2\2\u0107\u0108\7t\2\2\u0108\u0109\7q\2\2"+ + "\u0109\u010a\7y\2\2\u010a\61\3\2\2\2\u010b\u010c\7#\2\2\u010c\63\3\2\2"+ + "\2\u010d\u010e\7\u0080\2\2\u010e\65\3\2\2\2\u010f\u0110\7,\2\2\u0110\67"+ + "\3\2\2\2\u0111\u0112\7\61\2\2\u01129\3\2\2\2\u0113\u0114\7\'\2\2\u0114"+ + ";\3\2\2\2\u0115\u0116\7-\2\2\u0116=\3\2\2\2\u0117\u0118\7/\2\2\u0118?"+ + "\3\2\2\2\u0119\u011a\7>\2\2\u011a\u011b\7>\2\2\u011bA\3\2\2\2\u011c\u011d"+ + "\7@\2\2\u011d\u011e\7@\2\2\u011eC\3\2\2\2\u011f\u0120\7@\2\2\u0120\u0121"+ + 
"\7@\2\2\u0121\u0122\7@\2\2\u0122E\3\2\2\2\u0123\u0124\7>\2\2\u0124G\3"+ + "\2\2\2\u0125\u0126\7>\2\2\u0126\u0127\7?\2\2\u0127I\3\2\2\2\u0128\u0129"+ + "\7@\2\2\u0129K\3\2\2\2\u012a\u012b\7@\2\2\u012b\u012c\7?\2\2\u012cM\3"+ + "\2\2\2\u012d\u012e\7?\2\2\u012e\u012f\7?\2\2\u012fO\3\2\2\2\u0130\u0131"+ + "\7?\2\2\u0131\u0132\7?\2\2\u0132\u0133\7?\2\2\u0133Q\3\2\2\2\u0134\u0135"+ + "\7#\2\2\u0135\u0136\7?\2\2\u0136S\3\2\2\2\u0137\u0138\7#\2\2\u0138\u0139"+ + "\7?\2\2\u0139\u013a\7?\2\2\u013aU\3\2\2\2\u013b\u013c\7(\2\2\u013cW\3"+ + "\2\2\2\u013d\u013e\7`\2\2\u013eY\3\2\2\2\u013f\u0140\7~\2\2\u0140[\3\2"+ + "\2\2\u0141\u0142\7(\2\2\u0142\u0143\7(\2\2\u0143]\3\2\2\2\u0144\u0145"+ + "\7~\2\2\u0145\u0146\7~\2\2\u0146_\3\2\2\2\u0147\u0148\7A\2\2\u0148a\3"+ + "\2\2\2\u0149\u014a\7<\2\2\u014ac\3\2\2\2\u014b\u014c\7-\2\2\u014c\u014d"+ + "\7-\2\2\u014de\3\2\2\2\u014e\u014f\7/\2\2\u014f\u0150\7/\2\2\u0150g\3"+ + "\2\2\2\u0151\u0152\7?\2\2\u0152i\3\2\2\2\u0153\u0154\7-\2\2\u0154\u0155"+ + "\7?\2\2\u0155k\3\2\2\2\u0156\u0157\7/\2\2\u0157\u0158\7?\2\2\u0158m\3"+ + "\2\2\2\u0159\u015a\7,\2\2\u015a\u015b\7?\2\2\u015bo\3\2\2\2\u015c\u015d"+ + "\7\61\2\2\u015d\u015e\7?\2\2\u015eq\3\2\2\2\u015f\u0160\7\'\2\2\u0160"+ + "\u0161\7?\2\2\u0161s\3\2\2\2\u0162\u0163\7(\2\2\u0163\u0164\7?\2\2\u0164"+ + "u\3\2\2\2\u0165\u0166\7`\2\2\u0166\u0167\7?\2\2\u0167w\3\2\2\2\u0168\u0169"+ + "\7~\2\2\u0169\u016a\7?\2\2\u016ay\3\2\2\2\u016b\u016c\7>\2\2\u016c\u016d"+ + "\7>\2\2\u016d\u016e\7?\2\2\u016e{\3\2\2\2\u016f\u0170\7@\2\2\u0170\u0171"+ + "\7@\2\2\u0171\u0172\7?\2\2\u0172}\3\2\2\2\u0173\u0174\7@\2\2\u0174\u0175"+ + "\7@\2\2\u0175\u0176\7@\2\2\u0176\u0177\7?\2\2\u0177\177\3\2\2\2\u0178"+ + "\u017a\7\62\2\2\u0179\u017b\t\4\2\2\u017a\u0179\3\2\2\2\u017b\u017c\3"+ + "\2\2\2\u017c\u017a\3\2\2\2\u017c\u017d\3\2\2\2\u017d\u017f\3\2\2\2\u017e"+ + "\u0180\t\5\2\2\u017f\u017e\3\2\2\2\u017f\u0180\3\2\2\2\u0180\u0081\3\2"+ + "\2\2\u0181\u0182\7\62\2\2\u0182\u0184\t\6\2\2\u0183\u0185\t\7\2\2\u0184"+ + "\u0183\3\2\2\2\u0185\u0186\3\2\2\2\u0186\u0184\3\2\2\2\u0186\u0187\3\2"+ + "\2\2\u0187\u0189\3\2\2\2\u0188\u018a\t\5\2\2\u0189\u0188\3\2\2\2\u0189"+ + "\u018a\3\2\2\2\u018a\u0083\3\2\2\2\u018b\u0194\7\62\2\2\u018c\u0190\t"+ + "\b\2\2\u018d\u018f\t\t\2\2\u018e\u018d\3\2\2\2\u018f\u0192\3\2\2\2\u0190"+ + "\u018e\3\2\2\2\u0190\u0191\3\2\2\2\u0191\u0194\3\2\2\2\u0192\u0190\3\2"+ + "\2\2\u0193\u018b\3\2\2\2\u0193\u018c\3\2\2\2\u0194\u0196\3\2\2\2\u0195"+ + "\u0197\t\n\2\2\u0196\u0195\3\2\2\2\u0196\u0197\3\2\2\2\u0197\u0085\3\2"+ + "\2\2\u0198\u01a1\7\62\2\2\u0199\u019d\t\b\2\2\u019a\u019c\t\t\2\2\u019b"+ + "\u019a\3\2\2\2\u019c\u019f\3\2\2\2\u019d\u019b\3\2\2\2\u019d\u019e\3\2"+ + "\2\2\u019e\u01a1\3\2\2\2\u019f\u019d\3\2\2\2\u01a0\u0198\3\2\2\2\u01a0"+ + "\u0199\3\2\2\2\u01a1\u01a8\3\2\2\2\u01a2\u01a4\5\24\n\2\u01a3\u01a5\t"+ + "\t\2\2\u01a4\u01a3\3\2\2\2\u01a5\u01a6\3\2\2\2\u01a6\u01a4\3\2\2\2\u01a6"+ + "\u01a7\3\2\2\2\u01a7\u01a9\3\2\2\2\u01a8\u01a2\3\2\2\2\u01a8\u01a9\3\2"+ + "\2\2\u01a9\u01b3\3\2\2\2\u01aa\u01ac\t\13\2\2\u01ab\u01ad\t\f\2\2\u01ac"+ + "\u01ab\3\2\2\2\u01ac\u01ad\3\2\2\2\u01ad\u01af\3\2\2\2\u01ae\u01b0\t\t"+ + "\2\2\u01af\u01ae\3\2\2\2\u01b0\u01b1\3\2\2\2\u01b1\u01af\3\2\2\2\u01b1"+ + "\u01b2\3\2\2\2\u01b2\u01b4\3\2\2\2\u01b3\u01aa\3\2\2\2\u01b3\u01b4\3\2"+ + "\2\2\u01b4\u01b6\3\2\2\2\u01b5\u01b7\t\r\2\2\u01b6\u01b5\3\2\2\2\u01b6"+ + "\u01b7\3\2\2\2\u01b7\u0087\3\2\2\2\u01b8\u01c0\7$\2\2\u01b9\u01ba\7^\2"+ + "\2\u01ba\u01bf\7$\2\2\u01bb\u01bc\7^\2\2\u01bc\u01bf\7^\2\2\u01bd\u01bf"+ + 
"\n\16\2\2\u01be\u01b9\3\2\2\2\u01be\u01bb\3\2\2\2\u01be\u01bd\3\2\2\2"+ + "\u01bf\u01c2\3\2\2\2\u01c0\u01c1\3\2\2\2\u01c0\u01be\3\2\2\2\u01c1\u01c3"+ + "\3\2\2\2\u01c2\u01c0\3\2\2\2\u01c3\u01d1\7$\2\2\u01c4\u01cc\7)\2\2\u01c5"+ + "\u01c6\7^\2\2\u01c6\u01cb\7)\2\2\u01c7\u01c8\7^\2\2\u01c8\u01cb\7^\2\2"+ + "\u01c9\u01cb\n\16\2\2\u01ca\u01c5\3\2\2\2\u01ca\u01c7\3\2\2\2\u01ca\u01c9"+ + "\3\2\2\2\u01cb\u01ce\3\2\2\2\u01cc\u01cd\3\2\2\2\u01cc\u01ca\3\2\2\2\u01cd"+ + "\u01cf\3\2\2\2\u01ce\u01cc\3\2\2\2\u01cf\u01d1\7)\2\2\u01d0\u01b8\3\2"+ + "\2\2\u01d0\u01c4\3\2\2\2\u01d1\u0089\3\2\2\2\u01d2\u01d3\7v\2\2\u01d3"+ + "\u01d4\7t\2\2\u01d4\u01d5\7w\2\2\u01d5\u01d6\7g\2\2\u01d6\u008b\3\2\2"+ + "\2\u01d7\u01d8\7h\2\2\u01d8\u01d9\7c\2\2\u01d9\u01da\7n\2\2\u01da\u01db"+ + "\7u\2\2\u01db\u01dc\7g\2\2\u01dc\u008d\3\2\2\2\u01dd\u01de\7p\2\2\u01de"+ + "\u01df\7w\2\2\u01df\u01e0\7n\2\2\u01e0\u01e1\7n\2\2\u01e1\u008f\3\2\2"+ + "\2\u01e2\u01e8\5\u0092I\2\u01e3\u01e4\5\24\n\2\u01e4\u01e5\5\u0092I\2"+ + "\u01e5\u01e7\3\2\2\2\u01e6\u01e3\3\2\2\2\u01e7\u01ea\3\2\2\2\u01e8\u01e6"+ + "\3\2\2\2\u01e8\u01e9\3\2\2\2\u01e9\u01eb\3\2\2\2\u01ea\u01e8\3\2\2\2\u01eb"+ + "\u01ec\6H\2\2\u01ec\u0091\3\2\2\2\u01ed\u01f1\t\17\2\2\u01ee\u01f0\t\20"+ + "\2\2\u01ef\u01ee\3\2\2\2\u01f0\u01f3\3\2\2\2\u01f1\u01ef\3\2\2\2\u01f1"+ + "\u01f2\3\2\2\2\u01f2\u0093\3\2\2\2\u01f3\u01f1\3\2\2\2\u01f4\u01fd\7\62"+ + "\2\2\u01f5\u01f9\t\b\2\2\u01f6\u01f8\t\t\2\2\u01f7\u01f6\3\2\2\2\u01f8"+ + "\u01fb\3\2\2\2\u01f9\u01f7\3\2\2\2\u01f9\u01fa\3\2\2\2\u01fa\u01fd\3\2"+ + "\2\2\u01fb\u01f9\3\2\2\2\u01fc\u01f4\3\2\2\2\u01fc\u01f5\3\2\2\2\u01fd"+ + "\u01fe\3\2\2\2\u01fe\u01ff\bJ\4\2\u01ff\u0095\3\2\2\2\u0200\u0204\t\21"+ + "\2\2\u0201\u0203\t\20\2\2\u0202\u0201\3\2\2\2\u0203\u0206\3\2\2\2\u0204"+ + "\u0202\3\2\2\2\u0204\u0205\3\2\2\2\u0205\u0207\3\2\2\2\u0206\u0204\3\2"+ + "\2\2\u0207\u0208\bK\4\2\u0208\u0097\3\2\2\2!\2\3\u009b\u00a5\u00af\u00b4"+ + "\u017c\u017f\u0186\u0189\u0190\u0193\u0196\u019d\u01a0\u01a6\u01a8\u01ac"+ + "\u01b1\u01b3\u01b6\u01be\u01c0\u01ca\u01cc\u01d0\u01e8\u01f1\u01f9\u01fc"+ + "\u0204\5\b\2\2\4\3\2\4\2\2"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java index c8e76ee4bd9..11b38dc9742 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java @@ -25,20 +25,17 @@ class PainlessParser extends Parser { BWOR=44, BOOLAND=45, BOOLOR=46, COND=47, COLON=48, INCR=49, DECR=50, ASSIGN=51, AADD=52, ASUB=53, AMUL=54, ADIV=55, AREM=56, AAND=57, AXOR=58, AOR=59, ALSH=60, ARSH=61, AUSH=62, OCTAL=63, HEX=64, INTEGER=65, DECIMAL=66, STRING=67, - TRUE=68, FALSE=69, NULL=70, ID=71, EXTINTEGER=72, EXTID=73; + TRUE=68, FALSE=69, NULL=70, TYPE=71, ID=72, DOTINTEGER=73, DOTID=74; public static final int - RULE_source = 0, RULE_statement = 1, RULE_block = 2, RULE_empty = 3, RULE_emptyscope = 4, - RULE_initializer = 5, RULE_afterthought = 6, RULE_declaration = 7, RULE_decltype = 8, - RULE_declvar = 9, RULE_trap = 10, RULE_identifier = 11, RULE_generic = 12, - RULE_expression = 13, RULE_chain = 14, RULE_linkprec = 15, RULE_linkcast = 16, - RULE_linkbrace = 17, RULE_linkdot = 18, RULE_linkcall = 19, RULE_linkvar = 20, - RULE_linkfield = 21, RULE_linknew = 22, RULE_linkstring = 
23, RULE_arguments = 24; + RULE_source = 0, RULE_statement = 1, RULE_trailer = 2, RULE_block = 3, + RULE_empty = 4, RULE_initializer = 5, RULE_afterthought = 6, RULE_declaration = 7, + RULE_decltype = 8, RULE_declvar = 9, RULE_trap = 10, RULE_delimiter = 11, + RULE_expression = 12, RULE_unary = 13, RULE_chain = 14, RULE_primary = 15, + RULE_secondary = 16, RULE_dot = 17, RULE_brace = 18, RULE_arguments = 19; public static final String[] ruleNames = { - "source", "statement", "block", "empty", "emptyscope", "initializer", - "afterthought", "declaration", "decltype", "declvar", "trap", "identifier", - "generic", "expression", "chain", "linkprec", "linkcast", "linkbrace", - "linkdot", "linkcall", "linkvar", "linkfield", "linknew", "linkstring", - "arguments" + "source", "statement", "trailer", "block", "empty", "initializer", "afterthought", + "declaration", "decltype", "declvar", "trap", "delimiter", "expression", + "unary", "chain", "primary", "secondary", "dot", "brace", "arguments" }; private static final String[] _LITERAL_NAMES = { @@ -59,8 +56,8 @@ class PainlessParser extends Parser { "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "XOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL", "HEX", - "INTEGER", "DECIMAL", "STRING", "TRUE", "FALSE", "NULL", "ID", "EXTINTEGER", - "EXTID" + "INTEGER", "DECIMAL", "STRING", "TRUE", "FALSE", "NULL", "TYPE", "ID", + "DOTINTEGER", "DOTID" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -133,25 +130,27 @@ class PainlessParser extends Parser { public final SourceContext source() throws RecognitionException { SourceContext _localctx = new SourceContext(_ctx, getState()); enterRule(_localctx, 0, RULE_source); - int _la; try { + int _alt; enterOuterAlt(_localctx, 1); { - setState(51); + setState(43); _errHandler.sync(this); - _la = _input.LA(1); - do { - { - { - setState(50); - statement(); + _alt = getInterpreter().adaptivePredict(_input,0,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + { + { + setState(40); + statement(); + } + } } - } - setState(53); + setState(45); _errHandler.sync(this); - _la = _input.LA(1); - } while ( (((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR) | (1L << OCTAL))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (ID - 64)))) != 0) ); - setState(55); + _alt = getInterpreter().adaptivePredict(_input,0,_ctx); + } + setState(46); match(EOF); } } @@ -181,8 +180,9 @@ class PainlessParser extends Parser { public DeclarationContext declaration() { return getRuleContext(DeclarationContext.class,0); } - public TerminalNode SEMICOLON() { return getToken(PainlessParser.SEMICOLON, 0); } - public TerminalNode EOF() { return getToken(PainlessParser.EOF, 0); } + public DelimiterContext delimiter() { + return getRuleContext(DelimiterContext.class,0); + } public DeclContext(StatementContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { @@ -192,8 +192,9 @@ class 
PainlessParser extends Parser { } public static class BreakContext extends StatementContext { public TerminalNode BREAK() { return getToken(PainlessParser.BREAK, 0); } - public TerminalNode SEMICOLON() { return getToken(PainlessParser.SEMICOLON, 0); } - public TerminalNode EOF() { return getToken(PainlessParser.EOF, 0); } + public DelimiterContext delimiter() { + return getRuleContext(DelimiterContext.class,0); + } public BreakContext(StatementContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { @@ -206,8 +207,9 @@ class PainlessParser extends Parser { public ExpressionContext expression() { return getRuleContext(ExpressionContext.class,0); } - public TerminalNode SEMICOLON() { return getToken(PainlessParser.SEMICOLON, 0); } - public TerminalNode EOF() { return getToken(PainlessParser.EOF, 0); } + public DelimiterContext delimiter() { + return getRuleContext(DelimiterContext.class,0); + } public ThrowContext(StatementContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { @@ -217,8 +219,9 @@ class PainlessParser extends Parser { } public static class ContinueContext extends StatementContext { public TerminalNode CONTINUE() { return getToken(PainlessParser.CONTINUE, 0); } - public TerminalNode SEMICOLON() { return getToken(PainlessParser.SEMICOLON, 0); } - public TerminalNode EOF() { return getToken(PainlessParser.EOF, 0); } + public DelimiterContext delimiter() { + return getRuleContext(DelimiterContext.class,0); + } public ContinueContext(StatementContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { @@ -234,8 +237,8 @@ class PainlessParser extends Parser { return getToken(PainlessParser.SEMICOLON, i); } public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public BlockContext block() { - return getRuleContext(BlockContext.class,0); + public TrailerContext trailer() { + return getRuleContext(TrailerContext.class,0); } public EmptyContext empty() { return getRuleContext(EmptyContext.class,0); @@ -278,8 +281,9 @@ class PainlessParser extends Parser { public ExpressionContext expression() { return getRuleContext(ExpressionContext.class,0); } - public TerminalNode SEMICOLON() { return getToken(PainlessParser.SEMICOLON, 0); } - public TerminalNode EOF() { return getToken(PainlessParser.EOF, 0); } + public DelimiterContext delimiter() { + return getRuleContext(DelimiterContext.class,0); + } public ExprContext(StatementContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { @@ -298,8 +302,9 @@ class PainlessParser extends Parser { return getRuleContext(ExpressionContext.class,0); } public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public TerminalNode SEMICOLON() { return getToken(PainlessParser.SEMICOLON, 0); } - public TerminalNode EOF() { return getToken(PainlessParser.EOF, 0); } + public DelimiterContext delimiter() { + return getRuleContext(DelimiterContext.class,0); + } public DoContext(StatementContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { @@ -314,8 +319,8 @@ class PainlessParser extends Parser { return getRuleContext(ExpressionContext.class,0); } public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public BlockContext block() { - return getRuleContext(BlockContext.class,0); + public TrailerContext trailer() { + return getRuleContext(TrailerContext.class,0); } public EmptyContext empty() { return getRuleContext(EmptyContext.class,0); @@ -334,11 +339,11 @@ class PainlessParser 
extends Parser { return getRuleContext(ExpressionContext.class,0); } public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public List block() { - return getRuleContexts(BlockContext.class); + public List trailer() { + return getRuleContexts(TrailerContext.class); } - public BlockContext block(int i) { - return getRuleContext(BlockContext.class,i); + public TrailerContext trailer(int i) { + return getRuleContext(TrailerContext.class,i); } public TerminalNode ELSE() { return getToken(PainlessParser.ELSE, 0); } public IfContext(StatementContext ctx) { copyFrom(ctx); } @@ -353,8 +358,9 @@ class PainlessParser extends Parser { public ExpressionContext expression() { return getRuleContext(ExpressionContext.class,0); } - public TerminalNode SEMICOLON() { return getToken(PainlessParser.SEMICOLON, 0); } - public TerminalNode EOF() { return getToken(PainlessParser.EOF, 0); } + public DelimiterContext delimiter() { + return getRuleContext(DelimiterContext.class,0); + } public ReturnContext(StatementContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { @@ -366,33 +372,38 @@ class PainlessParser extends Parser { public final StatementContext statement() throws RecognitionException { StatementContext _localctx = new StatementContext(_ctx, getState()); enterRule(_localctx, 2, RULE_statement); - int _la; try { int _alt; - setState(125); + setState(117); switch ( getInterpreter().adaptivePredict(_input,8,_ctx) ) { case 1: _localctx = new IfContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(57); + setState(48); match(IF); - setState(58); + setState(49); match(LP); - setState(59); + setState(50); expression(0); - setState(60); + setState(51); match(RP); - setState(61); - block(); - setState(64); + setState(52); + trailer(); + setState(56); switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) { case 1: { - setState(62); + setState(53); match(ELSE); - setState(63); - block(); + setState(54); + trailer(); + } + break; + case 2: + { + setState(55); + if (!( _input.LA(1) != ELSE )) throw new FailedPredicateException(this, " _input.LA(1) != ELSE "); } break; } @@ -402,25 +413,25 @@ class PainlessParser extends Parser { _localctx = new WhileContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(66); + setState(58); match(WHILE); - setState(67); + setState(59); match(LP); - setState(68); + setState(60); expression(0); - setState(69); + setState(61); match(RP); - setState(72); + setState(64); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: { - setState(70); - block(); + setState(62); + trailer(); } break; case 2: { - setState(71); + setState(63); empty(); } break; @@ -431,79 +442,74 @@ class PainlessParser extends Parser { _localctx = new DoContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(74); + setState(66); match(DO); - setState(75); + setState(67); block(); - setState(76); + setState(68); match(WHILE); - setState(77); + setState(69); match(LP); - setState(78); + setState(70); expression(0); - setState(79); + setState(71); match(RP); - setState(80); - _la = _input.LA(1); - if ( !(_la==EOF || _la==SEMICOLON) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } + setState(72); + delimiter(); } break; case 4: _localctx = new ForContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(82); + setState(74); match(FOR); - setState(83); + setState(75); match(LP); - setState(85); - _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << 
BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR) | (1L << OCTAL))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (ID - 64)))) != 0)) { + setState(77); + switch ( getInterpreter().adaptivePredict(_input,3,_ctx) ) { + case 1: { - setState(84); + setState(76); initializer(); } + break; } - - setState(87); + setState(79); match(SEMICOLON); - setState(89); - _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR) | (1L << OCTAL))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (ID - 64)))) != 0)) { + setState(81); + switch ( getInterpreter().adaptivePredict(_input,4,_ctx) ) { + case 1: { - setState(88); + setState(80); expression(0); } + break; } - - setState(91); + setState(83); match(SEMICOLON); - setState(93); - _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR) | (1L << OCTAL))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (ID - 64)))) != 0)) { + setState(85); + switch ( getInterpreter().adaptivePredict(_input,5,_ctx) ) { + case 1: { - setState(92); + setState(84); afterthought(); } + break; } - - setState(95); + setState(87); match(RP); - setState(98); + setState(90); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: { - setState(96); - block(); + setState(88); + trailer(); } break; case 2: { - setState(97); + setState(89); empty(); } break; @@ -514,73 +520,53 @@ class PainlessParser extends Parser { _localctx = new DeclContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(100); + setState(92); declaration(); - setState(101); - _la = _input.LA(1); - if ( !(_la==EOF || _la==SEMICOLON) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } + setState(93); + delimiter(); } break; case 6: _localctx = new ContinueContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(103); + setState(95); match(CONTINUE); - setState(104); - _la = _input.LA(1); - if ( !(_la==EOF || _la==SEMICOLON) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } + setState(96); + delimiter(); } break; case 7: _localctx = new BreakContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(105); + setState(97); match(BREAK); - setState(106); - _la = _input.LA(1); - if ( !(_la==EOF || _la==SEMICOLON) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } + setState(98); + delimiter(); } break; case 8: _localctx = new ReturnContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(107); + setState(99); match(RETURN); - setState(108); + setState(100); expression(0); - setState(109); - _la = _input.LA(1); - if ( !(_la==EOF || _la==SEMICOLON) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } + setState(101); + delimiter(); } break; case 9: _localctx = new TryContext(_localctx); enterOuterAlt(_localctx, 9); { - 
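// NOTE: annotation sketch, not generated output. Two changes in the hunks
// above are easy to miss. First, the rewritten if statement resolves the
// classic dangling-else ambiguity with a semantic predicate: the else-less
// alternative is only viable when the next token is not ELSE, so every ELSE
// binds to the nearest unmatched IF. In grammar terms (inferred; the .g4
// source is not part of this diff):
//
//   IF LP expression RP trailer ( ELSE trailer | { _input.LA(1) != ELSE }? )
//
// For "if (a) if (b) x(); else y();" the inner if therefore owns the else.
// Second, the optional for clauses (initializer, condition, afterthought) no
// longer test token membership with hand-rolled two-word 64-bit masks such as
// "(1L << _la) & ((1L << LP) | ...)" but defer to adaptivePredict(), which is
// indifferent to token indices above 63. The hunks below continue the same
// simplification: trailer is "block | statement", block becomes
// "LBRACK statement* RBRACK" so an empty pair {} no longer needs the separate
// emptyscope rule, and decltype, declvar and trap match the dedicated TYPE
// and ID tokens where they previously went through the removed identifier and
// generic rules (a clause like "catch (Exception e) { ... }" now parses as
// CATCH LP TYPE ID RP block).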
setState(111); + setState(103); match(TRY); - setState(112); + setState(104); block(); - setState(114); + setState(106); _errHandler.sync(this); _alt = 1; do { @@ -588,7 +574,7 @@ class PainlessParser extends Parser { case 1: { { - setState(113); + setState(105); trap(); } } @@ -596,7 +582,7 @@ class PainlessParser extends Parser { default: throw new NoViableAltException(this); } - setState(116); + setState(108); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,7,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); @@ -606,32 +592,73 @@ class PainlessParser extends Parser { _localctx = new ThrowContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(118); + setState(110); match(THROW); - setState(119); + setState(111); expression(0); - setState(120); - _la = _input.LA(1); - if ( !(_la==EOF || _la==SEMICOLON) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } + setState(112); + delimiter(); } break; case 11: _localctx = new ExprContext(_localctx); enterOuterAlt(_localctx, 11); { - setState(122); + setState(114); expression(0); - setState(123); - _la = _input.LA(1); - if ( !(_la==EOF || _la==SEMICOLON) ) { - _errHandler.recoverInline(this); - } else { - consume(); + setState(115); + delimiter(); } + break; + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class TrailerContext extends ParserRuleContext { + public BlockContext block() { + return getRuleContext(BlockContext.class,0); + } + public StatementContext statement() { + return getRuleContext(StatementContext.class,0); + } + public TrailerContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_trailer; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitTrailer(this); + else return visitor.visitChildren(this); + } + } + + public final TrailerContext trailer() throws RecognitionException { + TrailerContext _localctx = new TrailerContext(_ctx, getState()); + enterRule(_localctx, 4, RULE_trailer); + try { + setState(121); + switch ( getInterpreter().adaptivePredict(_input,9,_ctx) ) { + case 1: + enterOuterAlt(_localctx, 1); + { + setState(119); + block(); + } + break; + case 2: + enterOuterAlt(_localctx, 2); + { + setState(120); + statement(); } break; } @@ -648,28 +675,6 @@ class PainlessParser extends Parser { } public static class BlockContext extends ParserRuleContext { - public BlockContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_block; } - - public BlockContext() { } - public void copyFrom(BlockContext ctx) { - super.copyFrom(ctx); - } - } - public static class SingleContext extends BlockContext { - public StatementContext statement() { - return getRuleContext(StatementContext.class,0); - } - public SingleContext(BlockContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitSingle(this); - else return visitor.visitChildren(this); - } - } - public static class MultipleContext extends BlockContext { public TerminalNode LBRACK() { return getToken(PainlessParser.LBRACK, 0); } public 
TerminalNode RBRACK() { return getToken(PainlessParser.RBRACK, 0); } public List statement() { @@ -678,80 +683,44 @@ class PainlessParser extends Parser { public StatementContext statement(int i) { return getRuleContext(StatementContext.class,i); } - public MultipleContext(BlockContext ctx) { copyFrom(ctx); } + public BlockContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_block; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitMultiple(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitBlock(this); else return visitor.visitChildren(this); } } public final BlockContext block() throws RecognitionException { BlockContext _localctx = new BlockContext(_ctx, getState()); - enterRule(_localctx, 4, RULE_block); - int _la; + enterRule(_localctx, 6, RULE_block); try { - setState(136); - switch (_input.LA(1)) { - case LBRACK: - _localctx = new MultipleContext(_localctx); - enterOuterAlt(_localctx, 1); - { - setState(127); - match(LBRACK); - setState(129); - _errHandler.sync(this); - _la = _input.LA(1); - do { + int _alt; + enterOuterAlt(_localctx, 1); + { + setState(123); + match(LBRACK); + setState(127); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,10,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { { { - setState(128); + setState(124); statement(); } - } - setState(131); - _errHandler.sync(this); - _la = _input.LA(1); - } while ( (((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR) | (1L << OCTAL))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (ID - 64)))) != 0) ); - setState(133); - match(RBRACK); + } } - break; - case LP: - case IF: - case WHILE: - case DO: - case FOR: - case CONTINUE: - case BREAK: - case RETURN: - case NEW: - case TRY: - case THROW: - case BOOLNOT: - case BWNOT: - case ADD: - case SUB: - case INCR: - case DECR: - case OCTAL: - case HEX: - case INTEGER: - case DECIMAL: - case STRING: - case TRUE: - case FALSE: - case NULL: - case ID: - _localctx = new SingleContext(_localctx); - enterOuterAlt(_localctx, 2); - { - setState(135); - statement(); - } - break; - default: - throw new NoViableAltException(this); + setState(129); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,10,_ctx); + } + setState(130); + match(RBRACK); } } catch (RecognitionException re) { @@ -766,9 +735,6 @@ class PainlessParser extends Parser { } public static class EmptyContext extends ParserRuleContext { - public EmptyscopeContext emptyscope() { - return getRuleContext(EmptyscopeContext.class,0); - } public TerminalNode SEMICOLON() { return getToken(PainlessParser.SEMICOLON, 0); } public EmptyContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -783,63 +749,12 @@ class PainlessParser extends Parser { public final EmptyContext empty() throws RecognitionException { EmptyContext _localctx = new 
EmptyContext(_ctx, getState()); - enterRule(_localctx, 6, RULE_empty); - try { - setState(140); - switch (_input.LA(1)) { - case LBRACK: - enterOuterAlt(_localctx, 1); - { - setState(138); - emptyscope(); - } - break; - case SEMICOLON: - enterOuterAlt(_localctx, 2); - { - setState(139); - match(SEMICOLON); - } - break; - default: - throw new NoViableAltException(this); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class EmptyscopeContext extends ParserRuleContext { - public TerminalNode LBRACK() { return getToken(PainlessParser.LBRACK, 0); } - public TerminalNode RBRACK() { return getToken(PainlessParser.RBRACK, 0); } - public EmptyscopeContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_emptyscope; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitEmptyscope(this); - else return visitor.visitChildren(this); - } - } - - public final EmptyscopeContext emptyscope() throws RecognitionException { - EmptyscopeContext _localctx = new EmptyscopeContext(_ctx, getState()); - enterRule(_localctx, 8, RULE_emptyscope); + enterRule(_localctx, 8, RULE_empty); try { enterOuterAlt(_localctx, 1); { - setState(142); - match(LBRACK); - setState(143); - match(RBRACK); + setState(132); + match(SEMICOLON); } } catch (RecognitionException re) { @@ -875,19 +790,19 @@ class PainlessParser extends Parser { InitializerContext _localctx = new InitializerContext(_ctx, getState()); enterRule(_localctx, 10, RULE_initializer); try { - setState(147); - switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { + setState(136); + switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(145); + setState(134); declaration(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(146); + setState(135); expression(0); } break; @@ -925,7 +840,7 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(149); + setState(138); expression(0); } } @@ -972,23 +887,23 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(151); + setState(140); decltype(); - setState(152); + setState(141); declvar(); - setState(157); + setState(146); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(153); + setState(142); match(COMMA); - setState(154); + setState(143); declvar(); } } - setState(159); + setState(148); _errHandler.sync(this); _la = _input.LA(1); } @@ -1006,9 +921,7 @@ class PainlessParser extends Parser { } public static class DecltypeContext extends ParserRuleContext { - public IdentifierContext identifier() { - return getRuleContext(IdentifierContext.class,0); - } + public TerminalNode TYPE() { return getToken(PainlessParser.TYPE, 0); } public List LBRACE() { return getTokens(PainlessParser.LBRACE); } public TerminalNode LBRACE(int i) { return getToken(PainlessParser.LBRACE, i); @@ -1035,21 +948,21 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(160); - identifier(); - setState(165); + setState(149); + match(TYPE); + setState(154); _errHandler.sync(this); _la = _input.LA(1); while (_la==LBRACE) { { { - setState(161); + setState(150); match(LBRACE); - setState(162); + 
setState(151); match(RBRACE); } } - setState(167); + setState(156); _errHandler.sync(this); _la = _input.LA(1); } @@ -1067,9 +980,7 @@ class PainlessParser extends Parser { } public static class DeclvarContext extends ParserRuleContext { - public IdentifierContext identifier() { - return getRuleContext(IdentifierContext.class,0); - } + public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } public TerminalNode ASSIGN() { return getToken(PainlessParser.ASSIGN, 0); } public ExpressionContext expression() { return getRuleContext(ExpressionContext.class,0); @@ -1092,15 +1003,15 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(168); - identifier(); - setState(171); + setState(157); + match(ID); + setState(160); _la = _input.LA(1); if (_la==ASSIGN) { { - setState(169); + setState(158); match(ASSIGN); - setState(170); + setState(159); expression(0); } } @@ -1121,19 +1032,12 @@ class PainlessParser extends Parser { public static class TrapContext extends ParserRuleContext { public TerminalNode CATCH() { return getToken(PainlessParser.CATCH, 0); } public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } + public TerminalNode TYPE() { return getToken(PainlessParser.TYPE, 0); } + public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public List identifier() { - return getRuleContexts(IdentifierContext.class); - } - public IdentifierContext identifier(int i) { - return getRuleContext(IdentifierContext.class,i); - } public BlockContext block() { return getRuleContext(BlockContext.class,0); } - public EmptyscopeContext emptyscope() { - return getRuleContext(EmptyscopeContext.class,0); - } public TrapContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @@ -1151,79 +1055,18 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(173); + setState(162); match(CATCH); - setState(174); + setState(163); match(LP); - { - setState(175); - identifier(); - setState(176); - identifier(); - } - setState(178); - match(RP); - setState(181); - switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { - case 1: - { - setState(179); - block(); - } - break; - case 2: - { - setState(180); - emptyscope(); - } - break; - } - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class IdentifierContext extends ParserRuleContext { - public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } - public GenericContext generic() { - return getRuleContext(GenericContext.class,0); - } - public IdentifierContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_identifier; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitIdentifier(this); - else return visitor.visitChildren(this); - } - } - - public final IdentifierContext identifier() throws RecognitionException { - IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 22, RULE_identifier); - try { - enterOuterAlt(_localctx, 1); - { - setState(183); + setState(164); + match(TYPE); + setState(165); match(ID); - setState(185); - switch ( 
getInterpreter().adaptivePredict(_input,17,_ctx) ) { - case 1: - { - setState(184); - generic(); - } - break; - } + setState(166); + match(RP); + setState(167); + block(); } } catch (RecognitionException re) { @@ -1237,59 +1080,34 @@ class PainlessParser extends Parser { return _localctx; } - public static class GenericContext extends ParserRuleContext { - public TerminalNode LT() { return getToken(PainlessParser.LT, 0); } - public List identifier() { - return getRuleContexts(IdentifierContext.class); - } - public IdentifierContext identifier(int i) { - return getRuleContext(IdentifierContext.class,i); - } - public TerminalNode GT() { return getToken(PainlessParser.GT, 0); } - public List COMMA() { return getTokens(PainlessParser.COMMA); } - public TerminalNode COMMA(int i) { - return getToken(PainlessParser.COMMA, i); - } - public GenericContext(ParserRuleContext parent, int invokingState) { + public static class DelimiterContext extends ParserRuleContext { + public TerminalNode SEMICOLON() { return getToken(PainlessParser.SEMICOLON, 0); } + public TerminalNode EOF() { return getToken(PainlessParser.EOF, 0); } + public DelimiterContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } - @Override public int getRuleIndex() { return RULE_generic; } + @Override public int getRuleIndex() { return RULE_delimiter; } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitGeneric(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitDelimiter(this); else return visitor.visitChildren(this); } } - public final GenericContext generic() throws RecognitionException { - GenericContext _localctx = new GenericContext(_ctx, getState()); - enterRule(_localctx, 24, RULE_generic); + public final DelimiterContext delimiter() throws RecognitionException { + DelimiterContext _localctx = new DelimiterContext(_ctx, getState()); + enterRule(_localctx, 22, RULE_delimiter); int _la; try { enterOuterAlt(_localctx, 1); { - setState(187); - match(LT); - setState(188); - identifier(); - setState(193); - _errHandler.sync(this); + setState(169); _la = _input.LA(1); - while (_la==COMMA) { - { - { - setState(189); - match(COMMA); - setState(190); - identifier(); - } - } - setState(195); - _errHandler.sync(this); - _la = _input.LA(1); + if ( !(_la==EOF || _la==SEMICOLON) ) { + _errHandler.recoverInline(this); + } else { + consume(); } - setState(196); - match(GT); } } catch (RecognitionException re) { @@ -1304,6 +1122,7 @@ class PainlessParser extends Parser { } public static class ExpressionContext extends ParserRuleContext { + public boolean s = true; public ExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @@ -1312,6 +1131,19 @@ class PainlessParser extends Parser { public ExpressionContext() { } public void copyFrom(ExpressionContext ctx) { super.copyFrom(ctx); + this.s = ctx.s; + } + } + public static class SingleContext extends ExpressionContext { + public UnaryContext u; + public UnaryContext unary() { + return getRuleContext(UnaryContext.class,0); + } + public SingleContext(ExpressionContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitSingle(this); + else return visitor.visitChildren(this); } } public static class CompContext extends ExpressionContext { @@ -1336,17 +1168,6 
@@ class PainlessParser extends Parser { else return visitor.visitChildren(this); } } - public static class ReadContext extends ExpressionContext { - public ChainContext chain() { - return getRuleContext(ChainContext.class,0); - } - public ReadContext(ExpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitRead(this); - else return visitor.visitChildren(this); - } - } public static class BoolContext extends ExpressionContext { public List expression() { return getRuleContexts(ExpressionContext.class); @@ -1364,6 +1185,8 @@ class PainlessParser extends Parser { } } public static class ConditionalContext extends ExpressionContext { + public ExpressionContext e0; + public ExpressionContext e1; public List expression() { return getRuleContexts(ExpressionContext.class); } @@ -1405,106 +1228,6 @@ class PainlessParser extends Parser { else return visitor.visitChildren(this); } } - public static class FalseContext extends ExpressionContext { - public TerminalNode FALSE() { return getToken(PainlessParser.FALSE, 0); } - public FalseContext(ExpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitFalse(this); - else return visitor.visitChildren(this); - } - } - public static class NumericContext extends ExpressionContext { - public TerminalNode OCTAL() { return getToken(PainlessParser.OCTAL, 0); } - public TerminalNode HEX() { return getToken(PainlessParser.HEX, 0); } - public TerminalNode INTEGER() { return getToken(PainlessParser.INTEGER, 0); } - public TerminalNode DECIMAL() { return getToken(PainlessParser.DECIMAL, 0); } - public NumericContext(ExpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNumeric(this); - else return visitor.visitChildren(this); - } - } - public static class UnaryContext extends ExpressionContext { - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public TerminalNode BOOLNOT() { return getToken(PainlessParser.BOOLNOT, 0); } - public TerminalNode BWNOT() { return getToken(PainlessParser.BWNOT, 0); } - public TerminalNode ADD() { return getToken(PainlessParser.ADD, 0); } - public TerminalNode SUB() { return getToken(PainlessParser.SUB, 0); } - public UnaryContext(ExpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitUnary(this); - else return visitor.visitChildren(this); - } - } - public static class PrecedenceContext extends ExpressionContext { - public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public PrecedenceContext(ExpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitPrecedence(this); - else return visitor.visitChildren(this); - } - } - public static class PreincContext extends ExpressionContext { - public ChainContext chain() { - return 
getRuleContext(ChainContext.class,0); - } - public TerminalNode INCR() { return getToken(PainlessParser.INCR, 0); } - public TerminalNode DECR() { return getToken(PainlessParser.DECR, 0); } - public PreincContext(ExpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitPreinc(this); - else return visitor.visitChildren(this); - } - } - public static class PostincContext extends ExpressionContext { - public ChainContext chain() { - return getRuleContext(ChainContext.class,0); - } - public TerminalNode INCR() { return getToken(PainlessParser.INCR, 0); } - public TerminalNode DECR() { return getToken(PainlessParser.DECR, 0); } - public PostincContext(ExpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitPostinc(this); - else return visitor.visitChildren(this); - } - } - public static class CastContext extends ExpressionContext { - public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } - public DecltypeContext decltype() { - return getRuleContext(DecltypeContext.class,0); - } - public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public CastContext(ExpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitCast(this); - else return visitor.visitChildren(this); - } - } - public static class NullContext extends ExpressionContext { - public TerminalNode NULL() { return getToken(PainlessParser.NULL, 0); } - public NullContext(ExpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNull(this); - else return visitor.visitChildren(this); - } - } public static class BinaryContext extends ExpressionContext { public List expression() { return getRuleContexts(ExpressionContext.class); @@ -1530,15 +1253,6 @@ class PainlessParser extends Parser { else return visitor.visitChildren(this); } } - public static class TrueContext extends ExpressionContext { - public TerminalNode TRUE() { return getToken(PainlessParser.TRUE, 0); } - public TrueContext(ExpressionContext ctx) { copyFrom(ctx); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitTrue(this); - else return visitor.visitChildren(this); - } - } public final ExpressionContext expression() throws RecognitionException { return expression(0); @@ -1549,339 +1263,235 @@ class PainlessParser extends Parser { int _parentState = getState(); ExpressionContext _localctx = new ExpressionContext(_ctx, _parentState); ExpressionContext _prevctx = _localctx; - int _startState = 26; - enterRecursionRule(_localctx, 26, RULE_expression, _p); + int _startState = 24; + enterRecursionRule(_localctx, 24, RULE_expression, _p); int _la; try { int _alt; enterOuterAlt(_localctx, 1); { - setState(224); - switch ( getInterpreter().adaptivePredict(_input,19,_ctx) ) { + setState(180); + switch ( getInterpreter().adaptivePredict(_input,15,_ctx) ) { case 1: - { - _localctx = new UnaryContext(_localctx); - _ctx = 
_localctx; - _prevctx = _localctx; - - setState(199); - _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB))) != 0)) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - setState(200); - expression(14); - } - break; - case 2: - { - _localctx = new CastContext(_localctx); - _ctx = _localctx; - _prevctx = _localctx; - setState(201); - match(LP); - setState(202); - decltype(); - setState(203); - match(RP); - setState(204); - expression(13); - } - break; - case 3: { _localctx = new AssignmentContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(206); - chain(); - setState(207); + + setState(172); + chain(true); + setState(173); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ASSIGN) | (1L << AADD) | (1L << ASUB) | (1L << AMUL) | (1L << ADIV) | (1L << AREM) | (1L << AAND) | (1L << AXOR) | (1L << AOR) | (1L << ALSH) | (1L << ARSH) | (1L << AUSH))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(208); + setState(174); expression(1); + ((AssignmentContext)_localctx).s = false; } break; - case 4: + case 2: { - _localctx = new PrecedenceContext(_localctx); + _localctx = new SingleContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(210); - match(LP); - setState(211); - expression(0); - setState(212); - match(RP); - } - break; - case 5: - { - _localctx = new NumericContext(_localctx); - _ctx = _localctx; - _prevctx = _localctx; - setState(214); - _la = _input.LA(1); - if ( !(((((_la - 63)) & ~0x3f) == 0 && ((1L << (_la - 63)) & ((1L << (OCTAL - 63)) | (1L << (HEX - 63)) | (1L << (INTEGER - 63)) | (1L << (DECIMAL - 63)))) != 0)) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - } - break; - case 6: - { - _localctx = new TrueContext(_localctx); - _ctx = _localctx; - _prevctx = _localctx; - setState(215); - match(TRUE); - } - break; - case 7: - { - _localctx = new FalseContext(_localctx); - _ctx = _localctx; - _prevctx = _localctx; - setState(216); - match(FALSE); - } - break; - case 8: - { - _localctx = new NullContext(_localctx); - _ctx = _localctx; - _prevctx = _localctx; - setState(217); - match(NULL); - } - break; - case 9: - { - _localctx = new PostincContext(_localctx); - _ctx = _localctx; - _prevctx = _localctx; - setState(218); - chain(); - setState(219); - _la = _input.LA(1); - if ( !(_la==INCR || _la==DECR) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - } - break; - case 10: - { - _localctx = new PreincContext(_localctx); - _ctx = _localctx; - _prevctx = _localctx; - setState(221); - _la = _input.LA(1); - if ( !(_la==INCR || _la==DECR) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - setState(222); - chain(); - } - break; - case 11: - { - _localctx = new ReadContext(_localctx); - _ctx = _localctx; - _prevctx = _localctx; - setState(223); - chain(); + setState(177); + ((SingleContext)_localctx).u = unary(false); + ((SingleContext)_localctx).s = ((SingleContext)_localctx).u.s; } break; } _ctx.stop = _input.LT(-1); - setState(264); + setState(241); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,21,_ctx); + _alt = getInterpreter().adaptivePredict(_input,17,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(262); - switch ( getInterpreter().adaptivePredict(_input,20,_ctx) ) { + setState(239); + 
switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(226); + setState(182); if (!(precpred(_ctx, 12))) throw new FailedPredicateException(this, "precpred(_ctx, 12)"); - setState(227); + setState(183); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << MUL) | (1L << DIV) | (1L << REM))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(228); + setState(184); expression(13); + ((BinaryContext)_localctx).s = false; } break; case 2: { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(229); + setState(187); if (!(precpred(_ctx, 11))) throw new FailedPredicateException(this, "precpred(_ctx, 11)"); - setState(230); + setState(188); _la = _input.LA(1); if ( !(_la==ADD || _la==SUB) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(231); + setState(189); expression(12); + ((BinaryContext)_localctx).s = false; } break; case 3: { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(232); + setState(192); if (!(precpred(_ctx, 10))) throw new FailedPredicateException(this, "precpred(_ctx, 10)"); - setState(233); + setState(193); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LSH) | (1L << RSH) | (1L << USH))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(234); + setState(194); expression(11); + ((BinaryContext)_localctx).s = false; } break; case 4: { _localctx = new CompContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(235); + setState(197); if (!(precpred(_ctx, 9))) throw new FailedPredicateException(this, "precpred(_ctx, 9)"); - setState(236); + setState(198); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LT) | (1L << LTE) | (1L << GT) | (1L << GTE))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(237); + setState(199); expression(10); + ((CompContext)_localctx).s = false; } break; case 5: { _localctx = new CompContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(238); + setState(202); if (!(precpred(_ctx, 8))) throw new FailedPredicateException(this, "precpred(_ctx, 8)"); - setState(239); + setState(203); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << EQ) | (1L << EQR) | (1L << NE) | (1L << NER))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(240); + setState(204); expression(9); + ((CompContext)_localctx).s = false; } break; case 6: { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(241); + setState(207); if (!(precpred(_ctx, 7))) throw new FailedPredicateException(this, "precpred(_ctx, 7)"); - setState(242); + setState(208); match(BWAND); - setState(243); + setState(209); expression(8); + ((BinaryContext)_localctx).s = false; } break; case 7: { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(244); + 
setState(212); if (!(precpred(_ctx, 6))) throw new FailedPredicateException(this, "precpred(_ctx, 6)"); - setState(245); + setState(213); match(XOR); - setState(246); + setState(214); expression(7); + ((BinaryContext)_localctx).s = false; } break; case 8: { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(247); + setState(217); if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(248); + setState(218); match(BWOR); - setState(249); + setState(219); expression(6); + ((BinaryContext)_localctx).s = false; } break; case 9: { _localctx = new BoolContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(250); + setState(222); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(251); + setState(223); match(BOOLAND); - setState(252); + setState(224); expression(5); + ((BoolContext)_localctx).s = false; } break; case 10: { _localctx = new BoolContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(253); + setState(227); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(254); + setState(228); match(BOOLOR); - setState(255); + setState(229); expression(4); + ((BoolContext)_localctx).s = false; } break; case 11: { _localctx = new ConditionalContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(256); + setState(232); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(257); + setState(233); match(COND); - setState(258); - expression(0); - setState(259); + setState(234); + ((ConditionalContext)_localctx).e0 = expression(0); + setState(235); match(COLON); - setState(260); - expression(2); + setState(236); + ((ConditionalContext)_localctx).e1 = expression(2); + ((ConditionalContext)_localctx).s = ((ConditionalContext)_localctx).e0.s && ((ConditionalContext)_localctx).e1.s; } break; } } } - setState(266); + setState(243); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,21,_ctx); + _alt = getInterpreter().adaptivePredict(_input,17,_ctx); } } } @@ -1896,270 +1506,262 @@ class PainlessParser extends Parser { return _localctx; } - public static class ChainContext extends ParserRuleContext { - public LinkprecContext linkprec() { - return getRuleContext(LinkprecContext.class,0); - } - public LinkcastContext linkcast() { - return getRuleContext(LinkcastContext.class,0); - } - public LinkvarContext linkvar() { - return getRuleContext(LinkvarContext.class,0); - } - public LinknewContext linknew() { - return getRuleContext(LinknewContext.class,0); - } - public LinkstringContext linkstring() { - return getRuleContext(LinkstringContext.class,0); - } - public ChainContext(ParserRuleContext parent, int invokingState) { + public static class UnaryContext extends ParserRuleContext { + public boolean c; + public boolean s = true; + public UnaryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } + public UnaryContext(ParserRuleContext parent, int invokingState, boolean c) { super(parent, invokingState); + this.c = c; } - @Override public int getRuleIndex() { return RULE_chain; } - @Override - public T accept(ParseTreeVisitor visitor) { - 
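// NOTE: annotation sketch, not generated output. The reworked expression rule
// above threads a boolean attribute through every context: s defaults to
// true, is cleared by the assignment, binary, shift, comparison and boolean
// alternatives (and by the literal alternatives of unary below), and a
// conditional propagates e0.s && e1.s from its two branches. Where s is
// consumed lies outside this diff, but the generated actions imply a grammar
// shape roughly like:
//
//   expression returns [boolean s = true]
//       :               u = unary[false]              { $s = $u.s; }   // single
//       |               expression ( MUL | DIV | REM ) expression
//                                                     { $s = false; }  // binary
//       |               ...
//       | <assoc=right> expression COND e0 = expression COLON e1 = expression
//                                                     { $s = $e0.s && $e1.s; } // conditional
//       | <assoc=right> chain[true] ( ASSIGN | AADD | ... ) expression
//                                                     { $s = false; }  // assignment
//       ;
//
// The old Read, Numeric, True, False, Null, Cast, Preinc and Postinc
// alternatives leave expression entirely and reappear under the new unary
// rule that starts here.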
if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitChain(this); - else return visitor.visitChildren(this); + @Override public int getRuleIndex() { return RULE_unary; } + + public UnaryContext() { } + public void copyFrom(UnaryContext ctx) { + super.copyFrom(ctx); + this.c = ctx.c; + this.s = ctx.s; } } - - public final ChainContext chain() throws RecognitionException { - ChainContext _localctx = new ChainContext(_ctx, getState()); - enterRule(_localctx, 28, RULE_chain); - try { - setState(272); - switch ( getInterpreter().adaptivePredict(_input,22,_ctx) ) { - case 1: - enterOuterAlt(_localctx, 1); - { - setState(267); - linkprec(); - } - break; - case 2: - enterOuterAlt(_localctx, 2); - { - setState(268); - linkcast(); - } - break; - case 3: - enterOuterAlt(_localctx, 3); - { - setState(269); - linkvar(); - } - break; - case 4: - enterOuterAlt(_localctx, 4); - { - setState(270); - linknew(); - } - break; - case 5: - enterOuterAlt(_localctx, 5); - { - setState(271); - linkstring(); - } - break; - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class LinkprecContext extends ParserRuleContext { - public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } - public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public LinkprecContext linkprec() { - return getRuleContext(LinkprecContext.class,0); - } - public LinkcastContext linkcast() { - return getRuleContext(LinkcastContext.class,0); - } - public LinkvarContext linkvar() { - return getRuleContext(LinkvarContext.class,0); - } - public LinknewContext linknew() { - return getRuleContext(LinknewContext.class,0); - } - public LinkstringContext linkstring() { - return getRuleContext(LinkstringContext.class,0); - } - public LinkdotContext linkdot() { - return getRuleContext(LinkdotContext.class,0); - } - public LinkbraceContext linkbrace() { - return getRuleContext(LinkbraceContext.class,0); - } - public LinkprecContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_linkprec; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitLinkprec(this); - else return visitor.visitChildren(this); - } - } - - public final LinkprecContext linkprec() throws RecognitionException { - LinkprecContext _localctx = new LinkprecContext(_ctx, getState()); - enterRule(_localctx, 30, RULE_linkprec); - try { - enterOuterAlt(_localctx, 1); - { - setState(274); - match(LP); - setState(280); - switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { - case 1: - { - setState(275); - linkprec(); - } - break; - case 2: - { - setState(276); - linkcast(); - } - break; - case 3: - { - setState(277); - linkvar(); - } - break; - case 4: - { - setState(278); - linknew(); - } - break; - case 5: - { - setState(279); - linkstring(); - } - break; - } - setState(282); - match(RP); - setState(285); - switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { - case 1: - { - setState(283); - linkdot(); - } - break; - case 2: - { - setState(284); - linkbrace(); - } - break; - } - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return 
_localctx; - } - - public static class LinkcastContext extends ParserRuleContext { + public static class CastContext extends UnaryContext { public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } public DecltypeContext decltype() { return getRuleContext(DecltypeContext.class,0); } public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } - public LinkprecContext linkprec() { - return getRuleContext(LinkprecContext.class,0); + public UnaryContext unary() { + return getRuleContext(UnaryContext.class,0); } - public LinkcastContext linkcast() { - return getRuleContext(LinkcastContext.class,0); - } - public LinkvarContext linkvar() { - return getRuleContext(LinkvarContext.class,0); - } - public LinknewContext linknew() { - return getRuleContext(LinknewContext.class,0); - } - public LinkstringContext linkstring() { - return getRuleContext(LinkstringContext.class,0); - } - public LinkcastContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_linkcast; } + public CastContext(UnaryContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitLinkcast(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitCast(this); + else return visitor.visitChildren(this); + } + } + public static class PreContext extends UnaryContext { + public ChainContext chain() { + return getRuleContext(ChainContext.class,0); + } + public TerminalNode INCR() { return getToken(PainlessParser.INCR, 0); } + public TerminalNode DECR() { return getToken(PainlessParser.DECR, 0); } + public PreContext(UnaryContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitPre(this); + else return visitor.visitChildren(this); + } + } + public static class ReadContext extends UnaryContext { + public ChainContext chain() { + return getRuleContext(ChainContext.class,0); + } + public ReadContext(UnaryContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitRead(this); + else return visitor.visitChildren(this); + } + } + public static class PostContext extends UnaryContext { + public ChainContext chain() { + return getRuleContext(ChainContext.class,0); + } + public TerminalNode INCR() { return getToken(PainlessParser.INCR, 0); } + public TerminalNode DECR() { return getToken(PainlessParser.DECR, 0); } + public PostContext(UnaryContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitPost(this); + else return visitor.visitChildren(this); + } + } + public static class NullContext extends UnaryContext { + public TerminalNode NULL() { return getToken(PainlessParser.NULL, 0); } + public NullContext(UnaryContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNull(this); + else return visitor.visitChildren(this); + } + } + public static class TrueContext extends UnaryContext { + public TerminalNode TRUE() { return getToken(PainlessParser.TRUE, 0); } + public 
TrueContext(UnaryContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitTrue(this); + else return visitor.visitChildren(this); + } + } + public static class FalseContext extends UnaryContext { + public TerminalNode FALSE() { return getToken(PainlessParser.FALSE, 0); } + public FalseContext(UnaryContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitFalse(this); + else return visitor.visitChildren(this); + } + } + public static class NumericContext extends UnaryContext { + public TerminalNode OCTAL() { return getToken(PainlessParser.OCTAL, 0); } + public TerminalNode HEX() { return getToken(PainlessParser.HEX, 0); } + public TerminalNode INTEGER() { return getToken(PainlessParser.INTEGER, 0); } + public TerminalNode DECIMAL() { return getToken(PainlessParser.DECIMAL, 0); } + public NumericContext(UnaryContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNumeric(this); + else return visitor.visitChildren(this); + } + } + public static class OperatorContext extends UnaryContext { + public UnaryContext unary() { + return getRuleContext(UnaryContext.class,0); + } + public TerminalNode BOOLNOT() { return getToken(PainlessParser.BOOLNOT, 0); } + public TerminalNode BWNOT() { return getToken(PainlessParser.BWNOT, 0); } + public TerminalNode ADD() { return getToken(PainlessParser.ADD, 0); } + public TerminalNode SUB() { return getToken(PainlessParser.SUB, 0); } + public OperatorContext(UnaryContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitOperator(this); else return visitor.visitChildren(this); } } - public final LinkcastContext linkcast() throws RecognitionException { - LinkcastContext _localctx = new LinkcastContext(_ctx, getState()); - enterRule(_localctx, 32, RULE_linkcast); + public final UnaryContext unary(boolean c) throws RecognitionException { + UnaryContext _localctx = new UnaryContext(_ctx, getState(), c); + enterRule(_localctx, 26, RULE_unary); + int _la; try { - enterOuterAlt(_localctx, 1); - { - setState(287); - match(LP); - setState(288); - decltype(); - setState(289); - match(RP); - setState(295); - switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { + setState(273); + switch ( getInterpreter().adaptivePredict(_input,18,_ctx) ) { case 1: + _localctx = new PreContext(_localctx); + enterOuterAlt(_localctx, 1); { - setState(290); - linkprec(); + setState(244); + if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); + setState(245); + _la = _input.LA(1); + if ( !(_la==INCR || _la==DECR) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(246); + chain(true); } break; case 2: + _localctx = new PostContext(_localctx); + enterOuterAlt(_localctx, 2); { - setState(291); - linkcast(); + setState(247); + if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); + setState(248); + chain(true); + setState(249); + _la = _input.LA(1); + if ( !(_la==INCR || _la==DECR) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } } break; case 3: + _localctx = new ReadContext(_localctx); + 
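// NOTE: annotation sketch, not generated output. Every alternative of unary
// except the cast guards itself with the predicate !$c, surfacing in the
// generated code as FailedPredicateException(" !$c "). Read directly off the
// nine cases of this method, the rule is roughly:
//
//   unary[boolean c] returns [boolean s = true]
//       : { !$c }? ( INCR | DECR ) chain[true]                         // pre
//       | { !$c }? chain[true] ( INCR | DECR )                         // post
//       | { !$c }? chain[false]                                        // read
//       | { !$c }? ( OCTAL | HEX | INTEGER | DECIMAL ) { $s = false; } // numeric
//       | { !$c }? TRUE   { $s = false; }                              // true
//       | { !$c }? FALSE  { $s = false; }                              // false
//       | { !$c }? NULL   { $s = false; }                              // null
//       | { !$c }? ( BOOLNOT | BWNOT | ADD | SUB ) unary[false]        // operator
//       |          LP decltype RP unary[$c]                            // cast
//       ;
//
// The meaning of the flag is not spelled out in this diff; what it shows is
// chain(true) at assignment and increment sites, chain(false) at plain reads,
// and unary(false) from expression with casts forwarding the flag unchanged,
// which suggests c marks positions whose chain must denote a storable
// location.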
enterOuterAlt(_localctx, 3); { - setState(292); - linkvar(); + setState(251); + if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); + setState(252); + chain(false); } break; case 4: + _localctx = new NumericContext(_localctx); + enterOuterAlt(_localctx, 4); { - setState(293); - linknew(); + setState(253); + if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); + setState(254); + _la = _input.LA(1); + if ( !(((((_la - 63)) & ~0x3f) == 0 && ((1L << (_la - 63)) & ((1L << (OCTAL - 63)) | (1L << (HEX - 63)) | (1L << (INTEGER - 63)) | (1L << (DECIMAL - 63)))) != 0)) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } + ((NumericContext)_localctx).s = false; } break; case 5: + _localctx = new TrueContext(_localctx); + enterOuterAlt(_localctx, 5); { - setState(294); - linkstring(); + setState(256); + if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); + setState(257); + match(TRUE); + ((TrueContext)_localctx).s = false; + } + break; + case 6: + _localctx = new FalseContext(_localctx); + enterOuterAlt(_localctx, 6); + { + setState(259); + if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); + setState(260); + match(FALSE); + ((FalseContext)_localctx).s = false; + } + break; + case 7: + _localctx = new NullContext(_localctx); + enterOuterAlt(_localctx, 7); + { + setState(262); + if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); + setState(263); + match(NULL); + ((NullContext)_localctx).s = false; + } + break; + case 8: + _localctx = new OperatorContext(_localctx); + enterOuterAlt(_localctx, 8); + { + setState(265); + if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); + setState(266); + _la = _input.LA(1); + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB))) != 0)) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } + setState(267); + unary(false); + } + break; + case 9: + _localctx = new CastContext(_localctx); + enterOuterAlt(_localctx, 9); + { + setState(268); + match(LP); + setState(269); + decltype(); + setState(270); + match(RP); + setState(271); + unary(_localctx.c); } break; - } } } catch (RecognitionException re) { @@ -2173,314 +1775,62 @@ class PainlessParser extends Parser { return _localctx; } - public static class LinkbraceContext extends ParserRuleContext { - public TerminalNode LBRACE() { return getToken(PainlessParser.LBRACE, 0); } - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public TerminalNode RBRACE() { return getToken(PainlessParser.RBRACE, 0); } - public LinkdotContext linkdot() { - return getRuleContext(LinkdotContext.class,0); - } - public LinkbraceContext linkbrace() { - return getRuleContext(LinkbraceContext.class,0); - } - public LinkbraceContext(ParserRuleContext parent, int invokingState) { + public static class ChainContext extends ParserRuleContext { + public boolean c; + public ChainContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } + public ChainContext(ParserRuleContext parent, int invokingState, boolean c) { super(parent, invokingState); + this.c = c; } - @Override public int getRuleIndex() { return RULE_linkbrace; } + @Override public int getRuleIndex() { return RULE_chain; } + + public ChainContext() { } + public void copyFrom(ChainContext ctx) { + super.copyFrom(ctx); + this.c = ctx.c; + } + } + public static class StaticContext extends ChainContext { + 
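// NOTE: annotation sketch, not generated output. The chain rule replaces the
// entire linkprec/linkcast/linkdot/linkbrace/linkcall/linkvar/linkfield/
// linknew family removed below. Where each old link rule recursed into the
// next, a chain is now one head followed by a flat, adaptively predicted run
// of secondary accesses. The three labeled shapes, read off these context
// classes (field names are real, the rule text is inferred), are roughly:
//
//   chain[boolean c]
//       : p = primary[$c] secondary*                                 // dynamic
//       | decltype dot secondary*                                    // static
//       | NEW TYPE ( LBRACE expression RBRACE )+ ( dot secondary* )? // newarray
//       ;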
public DecltypeContext decltype() { + return getRuleContext(DecltypeContext.class,0); + } + public DotContext dot() { + return getRuleContext(DotContext.class,0); + } + public List secondary() { + return getRuleContexts(SecondaryContext.class); + } + public SecondaryContext secondary(int i) { + return getRuleContext(SecondaryContext.class,i); + } + public StaticContext(ChainContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitLinkbrace(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitStatic(this); else return visitor.visitChildren(this); } } - - public final LinkbraceContext linkbrace() throws RecognitionException { - LinkbraceContext _localctx = new LinkbraceContext(_ctx, getState()); - enterRule(_localctx, 34, RULE_linkbrace); - try { - enterOuterAlt(_localctx, 1); - { - setState(297); - match(LBRACE); - setState(298); - expression(0); - setState(299); - match(RBRACE); - setState(302); - switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) { - case 1: - { - setState(300); - linkdot(); - } - break; - case 2: - { - setState(301); - linkbrace(); - } - break; - } - } + public static class DynamicContext extends ChainContext { + public PrimaryContext p; + public PrimaryContext primary() { + return getRuleContext(PrimaryContext.class,0); } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); + public List secondary() { + return getRuleContexts(SecondaryContext.class); } - finally { - exitRule(); + public SecondaryContext secondary(int i) { + return getRuleContext(SecondaryContext.class,i); } - return _localctx; - } - - public static class LinkdotContext extends ParserRuleContext { - public TerminalNode DOT() { return getToken(PainlessParser.DOT, 0); } - public LinkcallContext linkcall() { - return getRuleContext(LinkcallContext.class,0); - } - public LinkfieldContext linkfield() { - return getRuleContext(LinkfieldContext.class,0); - } - public LinkdotContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_linkdot; } + public DynamicContext(ChainContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitLinkdot(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitDynamic(this); else return visitor.visitChildren(this); } } - - public final LinkdotContext linkdot() throws RecognitionException { - LinkdotContext _localctx = new LinkdotContext(_ctx, getState()); - enterRule(_localctx, 36, RULE_linkdot); - try { - enterOuterAlt(_localctx, 1); - { - setState(304); - match(DOT); - setState(307); - switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { - case 1: - { - setState(305); - linkcall(); - } - break; - case 2: - { - setState(306); - linkfield(); - } - break; - } - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class LinkcallContext extends ParserRuleContext { - public TerminalNode EXTID() { return getToken(PainlessParser.EXTID, 0); } - public ArgumentsContext arguments() { - return 
getRuleContext(ArgumentsContext.class,0); - } - public LinkdotContext linkdot() { - return getRuleContext(LinkdotContext.class,0); - } - public LinkbraceContext linkbrace() { - return getRuleContext(LinkbraceContext.class,0); - } - public LinkcallContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_linkcall; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitLinkcall(this); - else return visitor.visitChildren(this); - } - } - - public final LinkcallContext linkcall() throws RecognitionException { - LinkcallContext _localctx = new LinkcallContext(_ctx, getState()); - enterRule(_localctx, 38, RULE_linkcall); - try { - enterOuterAlt(_localctx, 1); - { - setState(309); - match(EXTID); - setState(310); - arguments(); - setState(313); - switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { - case 1: - { - setState(311); - linkdot(); - } - break; - case 2: - { - setState(312); - linkbrace(); - } - break; - } - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class LinkvarContext extends ParserRuleContext { - public IdentifierContext identifier() { - return getRuleContext(IdentifierContext.class,0); - } - public LinkdotContext linkdot() { - return getRuleContext(LinkdotContext.class,0); - } - public LinkbraceContext linkbrace() { - return getRuleContext(LinkbraceContext.class,0); - } - public LinkvarContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_linkvar; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitLinkvar(this); - else return visitor.visitChildren(this); - } - } - - public final LinkvarContext linkvar() throws RecognitionException { - LinkvarContext _localctx = new LinkvarContext(_ctx, getState()); - enterRule(_localctx, 40, RULE_linkvar); - try { - enterOuterAlt(_localctx, 1); - { - setState(315); - identifier(); - setState(318); - switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { - case 1: - { - setState(316); - linkdot(); - } - break; - case 2: - { - setState(317); - linkbrace(); - } - break; - } - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class LinkfieldContext extends ParserRuleContext { - public TerminalNode EXTID() { return getToken(PainlessParser.EXTID, 0); } - public TerminalNode EXTINTEGER() { return getToken(PainlessParser.EXTINTEGER, 0); } - public LinkdotContext linkdot() { - return getRuleContext(LinkdotContext.class,0); - } - public LinkbraceContext linkbrace() { - return getRuleContext(LinkbraceContext.class,0); - } - public LinkfieldContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_linkfield; } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitLinkfield(this); - else return visitor.visitChildren(this); - } - } - - public final 
LinkfieldContext linkfield() throws RecognitionException { - LinkfieldContext _localctx = new LinkfieldContext(_ctx, getState()); - enterRule(_localctx, 42, RULE_linkfield); - int _la; - try { - enterOuterAlt(_localctx, 1); - { - setState(320); - _la = _input.LA(1); - if ( !(_la==EXTINTEGER || _la==EXTID) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - setState(323); - switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { - case 1: - { - setState(321); - linkdot(); - } - break; - case 2: - { - setState(322); - linkbrace(); - } - break; - } - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - - public static class LinknewContext extends ParserRuleContext { + public static class NewarrayContext extends ChainContext { public TerminalNode NEW() { return getToken(PainlessParser.NEW, 0); } - public IdentifierContext identifier() { - return getRuleContext(IdentifierContext.class,0); - } - public ArgumentsContext arguments() { - return getRuleContext(ArgumentsContext.class,0); - } - public LinkdotContext linkdot() { - return getRuleContext(LinkdotContext.class,0); - } + public TerminalNode TYPE() { return getToken(PainlessParser.TYPE, 0); } public List LBRACE() { return getTokens(PainlessParser.LBRACE); } public TerminalNode LBRACE(int i) { return getToken(PainlessParser.LBRACE, i); @@ -2495,51 +1845,89 @@ class PainlessParser extends Parser { public TerminalNode RBRACE(int i) { return getToken(PainlessParser.RBRACE, i); } - public LinknewContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); + public DotContext dot() { + return getRuleContext(DotContext.class,0); } - @Override public int getRuleIndex() { return RULE_linknew; } + public List secondary() { + return getRuleContexts(SecondaryContext.class); + } + public SecondaryContext secondary(int i) { + return getRuleContext(SecondaryContext.class,i); + } + public NewarrayContext(ChainContext ctx) { copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitLinknew(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNewarray(this); else return visitor.visitChildren(this); } } - public final LinknewContext linknew() throws RecognitionException { - LinknewContext _localctx = new LinknewContext(_ctx, getState()); - enterRule(_localctx, 44, RULE_linknew); + public final ChainContext chain(boolean c) throws RecognitionException { + ChainContext _localctx = new ChainContext(_ctx, getState(), c); + enterRule(_localctx, 28, RULE_chain); try { int _alt; - enterOuterAlt(_localctx, 1); - { - setState(325); - match(NEW); - setState(326); - identifier(); - setState(342); - switch (_input.LA(1)) { - case LP: + setState(309); + switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { + case 1: + _localctx = new DynamicContext(_localctx); + enterOuterAlt(_localctx, 1); { - { - setState(327); - arguments(); - setState(329); - switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { - case 1: - { - setState(328); - linkdot(); + setState(275); + ((DynamicContext)_localctx).p = primary(_localctx.c); + setState(279); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,19,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { 
+ { + { + setState(276); + secondary(((DynamicContext)_localctx).p.s); + } + } } - break; - } + setState(281); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,19,_ctx); } } break; - case LBRACE: + case 2: + _localctx = new StaticContext(_localctx); + enterOuterAlt(_localctx, 2); { + setState(282); + decltype(); + setState(283); + dot(); + setState(287); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,20,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + { + { + setState(284); + secondary(true); + } + } + } + setState(289); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,20,_ctx); + } + } + break; + case 3: + _localctx = new NewarrayContext(_localctx); + enterOuterAlt(_localctx, 3); { - setState(335); + setState(290); + match(NEW); + setState(291); + match(TYPE); + setState(296); _errHandler.sync(this); _alt = 1; do { @@ -2547,11 +1935,11 @@ class PainlessParser extends Parser { case 1: { { - setState(331); + setState(292); match(LBRACE); - setState(332); + setState(293); expression(0); - setState(333); + setState(294); match(RBRACE); } } @@ -2559,25 +1947,37 @@ class PainlessParser extends Parser { default: throw new NoViableAltException(this); } - setState(337); + setState(298); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,32,_ctx); + _alt = getInterpreter().adaptivePredict(_input,21,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); - setState(340); - switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { + setState(307); + switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { case 1: { - setState(339); - linkdot(); + setState(300); + dot(); + setState(304); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,22,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + { + { + setState(301); + secondary(true); + } + } + } + setState(306); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,22,_ctx); + } } break; } } - } break; - default: - throw new NoViableAltException(this); - } } } catch (RecognitionException re) { @@ -2591,48 +1991,337 @@ class PainlessParser extends Parser { return _localctx; } - public static class LinkstringContext extends ParserRuleContext { - public TerminalNode STRING() { return getToken(PainlessParser.STRING, 0); } - public LinkdotContext linkdot() { - return getRuleContext(LinkdotContext.class,0); - } - public LinkbraceContext linkbrace() { - return getRuleContext(LinkbraceContext.class,0); - } - public LinkstringContext(ParserRuleContext parent, int invokingState) { + public static class PrimaryContext extends ParserRuleContext { + public boolean c; + public boolean s = true; + public PrimaryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } + public PrimaryContext(ParserRuleContext parent, int invokingState, boolean c) { super(parent, invokingState); + this.c = c; } - @Override public int getRuleIndex() { return RULE_linkstring; } + @Override public int getRuleIndex() { return RULE_primary; } + + public PrimaryContext() { } + public void copyFrom(PrimaryContext ctx) { + super.copyFrom(ctx); + this.c = ctx.c; + this.s = ctx.s; + } + } + public static class StringContext extends PrimaryContext { + public TerminalNode STRING() { return getToken(PainlessParser.STRING, 0); } + public StringContext(PrimaryContext ctx) { 
copyFrom(ctx); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitLinkstring(this); + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitString(this); + else return visitor.visitChildren(this); + } + } + public static class VariableContext extends PrimaryContext { + public TerminalNode ID() { return getToken(PainlessParser.ID, 0); } + public VariableContext(PrimaryContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitVariable(this); + else return visitor.visitChildren(this); + } + } + public static class ExprprecContext extends PrimaryContext { + public ExpressionContext e; + public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } + public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class,0); + } + public ExprprecContext(PrimaryContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitExprprec(this); + else return visitor.visitChildren(this); + } + } + public static class NewobjectContext extends PrimaryContext { + public TerminalNode NEW() { return getToken(PainlessParser.NEW, 0); } + public TerminalNode TYPE() { return getToken(PainlessParser.TYPE, 0); } + public ArgumentsContext arguments() { + return getRuleContext(ArgumentsContext.class,0); + } + public NewobjectContext(PrimaryContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitNewobject(this); + else return visitor.visitChildren(this); + } + } + public static class ChainprecContext extends PrimaryContext { + public TerminalNode LP() { return getToken(PainlessParser.LP, 0); } + public UnaryContext unary() { + return getRuleContext(UnaryContext.class,0); + } + public TerminalNode RP() { return getToken(PainlessParser.RP, 0); } + public ChainprecContext(PrimaryContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitChainprec(this); else return visitor.visitChildren(this); } } - public final LinkstringContext linkstring() throws RecognitionException { - LinkstringContext _localctx = new LinkstringContext(_ctx, getState()); - enterRule(_localctx, 46, RULE_linkstring); + public final PrimaryContext primary(boolean c) throws RecognitionException { + PrimaryContext _localctx = new PrimaryContext(_ctx, getState(), c); + enterRule(_localctx, 30, RULE_primary); try { - enterOuterAlt(_localctx, 1); - { - setState(344); - match(STRING); - setState(347); - switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) { + setState(327); + switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { case 1: + _localctx = new ExprprecContext(_localctx); + enterOuterAlt(_localctx, 1); { - setState(345); - linkdot(); + setState(311); + if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); + setState(312); + match(LP); + setState(313); + ((ExprprecContext)_localctx).e = expression(0); + setState(314); + match(RP); + ((ExprprecContext)_localctx).s = 
((ExprprecContext)_localctx).e.s; } break; case 2: + _localctx = new ChainprecContext(_localctx); + enterOuterAlt(_localctx, 2); { - setState(346); - linkbrace(); + setState(317); + if (!( _localctx.c )) throw new FailedPredicateException(this, " $c "); + setState(318); + match(LP); + setState(319); + unary(true); + setState(320); + match(RP); + } + break; + case 3: + _localctx = new StringContext(_localctx); + enterOuterAlt(_localctx, 3); + { + setState(322); + match(STRING); + } + break; + case 4: + _localctx = new VariableContext(_localctx); + enterOuterAlt(_localctx, 4); + { + setState(323); + match(ID); + } + break; + case 5: + _localctx = new NewobjectContext(_localctx); + enterOuterAlt(_localctx, 5); + { + setState(324); + match(NEW); + setState(325); + match(TYPE); + setState(326); + arguments(); } break; } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class SecondaryContext extends ParserRuleContext { + public boolean s; + public DotContext dot() { + return getRuleContext(DotContext.class,0); + } + public BraceContext brace() { + return getRuleContext(BraceContext.class,0); + } + public SecondaryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } + public SecondaryContext(ParserRuleContext parent, int invokingState, boolean s) { + super(parent, invokingState); + this.s = s; + } + @Override public int getRuleIndex() { return RULE_secondary; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitSecondary(this); + else return visitor.visitChildren(this); + } + } + + public final SecondaryContext secondary(boolean s) throws RecognitionException { + SecondaryContext _localctx = new SecondaryContext(_ctx, getState(), s); + enterRule(_localctx, 32, RULE_secondary); + try { + setState(333); + switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) { + case 1: + enterOuterAlt(_localctx, 1); + { + setState(329); + if (!( _localctx.s )) throw new FailedPredicateException(this, " $s "); + setState(330); + dot(); + } + break; + case 2: + enterOuterAlt(_localctx, 2); + { + setState(331); + if (!( _localctx.s )) throw new FailedPredicateException(this, " $s "); + setState(332); + brace(); + } + break; + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class DotContext extends ParserRuleContext { + public DotContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_dot; } + + public DotContext() { } + public void copyFrom(DotContext ctx) { + super.copyFrom(ctx); + } + } + public static class CallinvokeContext extends DotContext { + public TerminalNode DOT() { return getToken(PainlessParser.DOT, 0); } + public TerminalNode DOTID() { return getToken(PainlessParser.DOTID, 0); } + public ArgumentsContext arguments() { + return getRuleContext(ArgumentsContext.class,0); + } + public CallinvokeContext(DotContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitCallinvoke(this); + else return visitor.visitChildren(this); + } + } 
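Each labeled alternative of the new dot rule is generated as its own context subclass, and its accept method dispatches to a matching visit method (visitCallinvoke here, visitFieldaccess just below), so a visitor can tell method invocations apart from field reads without re-inspecting tokens. A minimal sketch of using that hook, assuming the package-private PainlessParserBaseVisitor generated later in this change; the CallCounter class is illustrative and not part of the diff:

// Lives in org.elasticsearch.painless.antlr, since the generated types are package-private.
class CallCounter extends PainlessParserBaseVisitor<Void> {
    int calls = 0;

    @Override
    public Void visitCallinvoke(PainlessParser.CallinvokeContext ctx) {
        ++calls;                   // one DOT DOTID arguments() segment in a chain
        return visitChildren(ctx); // argument expressions may contain further calls
    }
}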
+ public static class FieldaccessContext extends DotContext { + public TerminalNode DOT() { return getToken(PainlessParser.DOT, 0); } + public TerminalNode DOTID() { return getToken(PainlessParser.DOTID, 0); } + public TerminalNode DOTINTEGER() { return getToken(PainlessParser.DOTINTEGER, 0); } + public FieldaccessContext(DotContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitFieldaccess(this); + else return visitor.visitChildren(this); + } + } + + public final DotContext dot() throws RecognitionException { + DotContext _localctx = new DotContext(_ctx, getState()); + enterRule(_localctx, 34, RULE_dot); + int _la; + try { + setState(340); + switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { + case 1: + _localctx = new CallinvokeContext(_localctx); + enterOuterAlt(_localctx, 1); + { + setState(335); + match(DOT); + setState(336); + match(DOTID); + setState(337); + arguments(); + } + break; + case 2: + _localctx = new FieldaccessContext(_localctx); + enterOuterAlt(_localctx, 2); + { + setState(338); + match(DOT); + setState(339); + _la = _input.LA(1); + if ( !(_la==DOTINTEGER || _la==DOTID) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } + } + break; + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class BraceContext extends ParserRuleContext { + public BraceContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_brace; } + + public BraceContext() { } + public void copyFrom(BraceContext ctx) { + super.copyFrom(ctx); + } + } + public static class BraceaccessContext extends BraceContext { + public TerminalNode LBRACE() { return getToken(PainlessParser.LBRACE, 0); } + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class,0); + } + public TerminalNode RBRACE() { return getToken(PainlessParser.RBRACE, 0); } + public BraceaccessContext(BraceContext ctx) { copyFrom(ctx); } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor)visitor).visitBraceaccess(this); + else return visitor.visitChildren(this); + } + } + + public final BraceContext brace() throws RecognitionException { + BraceContext _localctx = new BraceContext(_ctx, getState()); + enterRule(_localctx, 36, RULE_brace); + try { + _localctx = new BraceaccessContext(_localctx); + enterOuterAlt(_localctx, 1); + { + setState(342); + match(LBRACE); + setState(343); + expression(0); + setState(344); + match(RBRACE); } } catch (RecognitionException re) { @@ -2672,40 +2361,40 @@ class PainlessParser extends Parser { public final ArgumentsContext arguments() throws RecognitionException { ArgumentsContext _localctx = new ArgumentsContext(_ctx, getState()); - enterRule(_localctx, 48, RULE_arguments); + enterRule(_localctx, 38, RULE_arguments); int _la; try { enterOuterAlt(_localctx, 1); { { - setState(349); + setState(346); match(LP); - setState(358); - _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR) | (1L << OCTAL))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (HEX 
- 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (ID - 64)))) != 0)) { + setState(355); + switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { + case 1: { - setState(350); + setState(347); expression(0); - setState(355); + setState(352); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(351); + setState(348); match(COMMA); - setState(352); + setState(349); expression(0); } } - setState(357); + setState(354); _errHandler.sync(this); _la = _input.LA(1); } } + break; } - - setState(360); + setState(357); match(RP); } } @@ -2723,179 +2412,224 @@ class PainlessParser extends Parser { public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { - case 13: + case 1: + return statement_sempred((StatementContext)_localctx, predIndex); + case 12: return expression_sempred((ExpressionContext)_localctx, predIndex); + case 13: + return unary_sempred((UnaryContext)_localctx, predIndex); + case 15: + return primary_sempred((PrimaryContext)_localctx, predIndex); + case 16: + return secondary_sempred((SecondaryContext)_localctx, predIndex); + } + return true; + } + private boolean statement_sempred(StatementContext _localctx, int predIndex) { + switch (predIndex) { + case 0: + return _input.LA(1) != ELSE ; } return true; } private boolean expression_sempred(ExpressionContext _localctx, int predIndex) { switch (predIndex) { - case 0: - return precpred(_ctx, 12); case 1: - return precpred(_ctx, 11); + return precpred(_ctx, 12); case 2: - return precpred(_ctx, 10); + return precpred(_ctx, 11); case 3: - return precpred(_ctx, 9); + return precpred(_ctx, 10); case 4: - return precpred(_ctx, 8); + return precpred(_ctx, 9); case 5: - return precpred(_ctx, 7); + return precpred(_ctx, 8); case 6: - return precpred(_ctx, 6); + return precpred(_ctx, 7); case 7: - return precpred(_ctx, 5); + return precpred(_ctx, 6); case 8: - return precpred(_ctx, 4); + return precpred(_ctx, 5); case 9: - return precpred(_ctx, 3); + return precpred(_ctx, 4); case 10: + return precpred(_ctx, 3); + case 11: return precpred(_ctx, 2); } return true; } + private boolean unary_sempred(UnaryContext _localctx, int predIndex) { + switch (predIndex) { + case 12: + return !_localctx.c ; + case 13: + return !_localctx.c ; + case 14: + return !_localctx.c ; + case 15: + return !_localctx.c ; + case 16: + return !_localctx.c ; + case 17: + return !_localctx.c ; + case 18: + return !_localctx.c ; + case 19: + return !_localctx.c ; + } + return true; + } + private boolean primary_sempred(PrimaryContext _localctx, int predIndex) { + switch (predIndex) { + case 20: + return !_localctx.c ; + case 21: + return _localctx.c ; + } + return true; + } + private boolean secondary_sempred(SecondaryContext _localctx, int predIndex) { + switch (predIndex) { + case 22: + return _localctx.s ; + case 23: + return _localctx.s ; + } + return true; + } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3K\u016d\4\2\t\2\4"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3L\u016a\4\2\t\2\4"+ "\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+ "\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ - "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ - "\4\32\t\32\3\2\6\2\66\n\2\r\2\16\2\67\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3"+ - 
"\3\3\5\3C\n\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3K\n\3\3\3\3\3\3\3\3\3\3\3\3\3"+ - "\3\3\3\3\3\3\3\3\3\3\5\3X\n\3\3\3\3\3\5\3\\\n\3\3\3\3\3\5\3`\n\3\3\3\3"+ - "\3\3\3\5\3e\n\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3"+ - "\3\6\3u\n\3\r\3\16\3v\3\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3\u0080\n\3\3\4\3"+ - "\4\6\4\u0084\n\4\r\4\16\4\u0085\3\4\3\4\3\4\5\4\u008b\n\4\3\5\3\5\5\5"+ - "\u008f\n\5\3\6\3\6\3\6\3\7\3\7\5\7\u0096\n\7\3\b\3\b\3\t\3\t\3\t\3\t\7"+ - "\t\u009e\n\t\f\t\16\t\u00a1\13\t\3\n\3\n\3\n\7\n\u00a6\n\n\f\n\16\n\u00a9"+ - "\13\n\3\13\3\13\3\13\5\13\u00ae\n\13\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\5"+ - "\f\u00b8\n\f\3\r\3\r\5\r\u00bc\n\r\3\16\3\16\3\16\3\16\7\16\u00c2\n\16"+ - "\f\16\16\16\u00c5\13\16\3\16\3\16\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3"+ - "\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3"+ - "\17\3\17\3\17\3\17\3\17\5\17\u00e3\n\17\3\17\3\17\3\17\3\17\3\17\3\17"+ + "\4\23\t\23\4\24\t\24\4\25\t\25\3\2\7\2,\n\2\f\2\16\2/\13\2\3\2\3\2\3\3"+ + "\3\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3;\n\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3C\n\3"+ + "\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3P\n\3\3\3\3\3\5\3T\n\3"+ + "\3\3\3\3\5\3X\n\3\3\3\3\3\3\3\5\3]\n\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3"+ + "\3\3\3\3\3\3\3\3\3\3\3\3\6\3m\n\3\r\3\16\3n\3\3\3\3\3\3\3\3\3\3\3\3\3"+ + "\3\5\3x\n\3\3\4\3\4\5\4|\n\4\3\5\3\5\7\5\u0080\n\5\f\5\16\5\u0083\13\5"+ + "\3\5\3\5\3\6\3\6\3\7\3\7\5\7\u008b\n\7\3\b\3\b\3\t\3\t\3\t\3\t\7\t\u0093"+ + "\n\t\f\t\16\t\u0096\13\t\3\n\3\n\3\n\7\n\u009b\n\n\f\n\16\n\u009e\13\n"+ + "\3\13\3\13\3\13\5\13\u00a3\n\13\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\r\3\r\3"+ + "\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\5\16\u00b7\n\16\3\16\3\16"+ + "\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16"+ + "\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16"+ + "\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16"+ + "\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\7\16"+ + "\u00f2\n\16\f\16\16\16\u00f5\13\16\3\17\3\17\3\17\3\17\3\17\3\17\3\17"+ "\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17"+ - "\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17"+ - "\3\17\3\17\7\17\u0109\n\17\f\17\16\17\u010c\13\17\3\20\3\20\3\20\3\20"+ - "\3\20\5\20\u0113\n\20\3\21\3\21\3\21\3\21\3\21\3\21\5\21\u011b\n\21\3"+ - "\21\3\21\3\21\5\21\u0120\n\21\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22"+ - "\5\22\u012a\n\22\3\23\3\23\3\23\3\23\3\23\5\23\u0131\n\23\3\24\3\24\3"+ - "\24\5\24\u0136\n\24\3\25\3\25\3\25\3\25\5\25\u013c\n\25\3\26\3\26\3\26"+ - "\5\26\u0141\n\26\3\27\3\27\3\27\5\27\u0146\n\27\3\30\3\30\3\30\3\30\5"+ - "\30\u014c\n\30\3\30\3\30\3\30\3\30\6\30\u0152\n\30\r\30\16\30\u0153\3"+ - "\30\5\30\u0157\n\30\5\30\u0159\n\30\3\31\3\31\3\31\5\31\u015e\n\31\3\32"+ - "\3\32\3\32\3\32\7\32\u0164\n\32\f\32\16\32\u0167\13\32\5\32\u0169\n\32"+ - "\3\32\3\32\3\32\2\3\34\33\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&"+ - "(*,.\60\62\2\r\3\3\r\r\4\2\32\33\37 \3\2\65@\3\2AD\3\2\63\64\3\2\34\36"+ - "\3\2\37 \3\2!#\3\2$\'\3\2(+\3\2JK\u01a3\2\65\3\2\2\2\4\177\3\2\2\2\6\u008a"+ - "\3\2\2\2\b\u008e\3\2\2\2\n\u0090\3\2\2\2\f\u0095\3\2\2\2\16\u0097\3\2"+ - "\2\2\20\u0099\3\2\2\2\22\u00a2\3\2\2\2\24\u00aa\3\2\2\2\26\u00af\3\2\2"+ - "\2\30\u00b9\3\2\2\2\32\u00bd\3\2\2\2\34\u00e2\3\2\2\2\36\u0112\3\2\2\2"+ - " \u0114\3\2\2\2\"\u0121\3\2\2\2$\u012b\3\2\2\2&\u0132\3\2\2\2(\u0137\3"+ - "\2\2\2*\u013d\3\2\2\2,\u0142\3\2\2\2.\u0147\3\2\2\2\60\u015a\3\2\2\2\62"+ - 
"\u015f\3\2\2\2\64\66\5\4\3\2\65\64\3\2\2\2\66\67\3\2\2\2\67\65\3\2\2\2"+ - "\678\3\2\2\289\3\2\2\29:\7\2\2\3:\3\3\2\2\2;<\7\16\2\2<=\7\t\2\2=>\5\34"+ - "\17\2>?\7\n\2\2?B\5\6\4\2@A\7\17\2\2AC\5\6\4\2B@\3\2\2\2BC\3\2\2\2C\u0080"+ - "\3\2\2\2DE\7\20\2\2EF\7\t\2\2FG\5\34\17\2GJ\7\n\2\2HK\5\6\4\2IK\5\b\5"+ - "\2JH\3\2\2\2JI\3\2\2\2K\u0080\3\2\2\2LM\7\21\2\2MN\5\6\4\2NO\7\20\2\2"+ - "OP\7\t\2\2PQ\5\34\17\2QR\7\n\2\2RS\t\2\2\2S\u0080\3\2\2\2TU\7\22\2\2U"+ - "W\7\t\2\2VX\5\f\7\2WV\3\2\2\2WX\3\2\2\2XY\3\2\2\2Y[\7\r\2\2Z\\\5\34\17"+ - "\2[Z\3\2\2\2[\\\3\2\2\2\\]\3\2\2\2]_\7\r\2\2^`\5\16\b\2_^\3\2\2\2_`\3"+ - "\2\2\2`a\3\2\2\2ad\7\n\2\2be\5\6\4\2ce\5\b\5\2db\3\2\2\2dc\3\2\2\2e\u0080"+ - "\3\2\2\2fg\5\20\t\2gh\t\2\2\2h\u0080\3\2\2\2ij\7\23\2\2j\u0080\t\2\2\2"+ - "kl\7\24\2\2l\u0080\t\2\2\2mn\7\25\2\2no\5\34\17\2op\t\2\2\2p\u0080\3\2"+ - "\2\2qr\7\27\2\2rt\5\6\4\2su\5\26\f\2ts\3\2\2\2uv\3\2\2\2vt\3\2\2\2vw\3"+ - "\2\2\2w\u0080\3\2\2\2xy\7\31\2\2yz\5\34\17\2z{\t\2\2\2{\u0080\3\2\2\2"+ - "|}\5\34\17\2}~\t\2\2\2~\u0080\3\2\2\2\177;\3\2\2\2\177D\3\2\2\2\177L\3"+ - "\2\2\2\177T\3\2\2\2\177f\3\2\2\2\177i\3\2\2\2\177k\3\2\2\2\177m\3\2\2"+ - "\2\177q\3\2\2\2\177x\3\2\2\2\177|\3\2\2\2\u0080\5\3\2\2\2\u0081\u0083"+ - "\7\5\2\2\u0082\u0084\5\4\3\2\u0083\u0082\3\2\2\2\u0084\u0085\3\2\2\2\u0085"+ - "\u0083\3\2\2\2\u0085\u0086\3\2\2\2\u0086\u0087\3\2\2\2\u0087\u0088\7\6"+ - "\2\2\u0088\u008b\3\2\2\2\u0089\u008b\5\4\3\2\u008a\u0081\3\2\2\2\u008a"+ - "\u0089\3\2\2\2\u008b\7\3\2\2\2\u008c\u008f\5\n\6\2\u008d\u008f\7\r\2\2"+ - "\u008e\u008c\3\2\2\2\u008e\u008d\3\2\2\2\u008f\t\3\2\2\2\u0090\u0091\7"+ - "\5\2\2\u0091\u0092\7\6\2\2\u0092\13\3\2\2\2\u0093\u0096\5\20\t\2\u0094"+ - "\u0096\5\34\17\2\u0095\u0093\3\2\2\2\u0095\u0094\3\2\2\2\u0096\r\3\2\2"+ - "\2\u0097\u0098\5\34\17\2\u0098\17\3\2\2\2\u0099\u009a\5\22\n\2\u009a\u009f"+ - "\5\24\13\2\u009b\u009c\7\f\2\2\u009c\u009e\5\24\13\2\u009d\u009b\3\2\2"+ - "\2\u009e\u00a1\3\2\2\2\u009f\u009d\3\2\2\2\u009f\u00a0\3\2\2\2\u00a0\21"+ - "\3\2\2\2\u00a1\u009f\3\2\2\2\u00a2\u00a7\5\30\r\2\u00a3\u00a4\7\7\2\2"+ - "\u00a4\u00a6\7\b\2\2\u00a5\u00a3\3\2\2\2\u00a6\u00a9\3\2\2\2\u00a7\u00a5"+ - "\3\2\2\2\u00a7\u00a8\3\2\2\2\u00a8\23\3\2\2\2\u00a9\u00a7\3\2\2\2\u00aa"+ - "\u00ad\5\30\r\2\u00ab\u00ac\7\65\2\2\u00ac\u00ae\5\34\17\2\u00ad\u00ab"+ - "\3\2\2\2\u00ad\u00ae\3\2\2\2\u00ae\25\3\2\2\2\u00af\u00b0\7\30\2\2\u00b0"+ - "\u00b1\7\t\2\2\u00b1\u00b2\5\30\r\2\u00b2\u00b3\5\30\r\2\u00b3\u00b4\3"+ - "\2\2\2\u00b4\u00b7\7\n\2\2\u00b5\u00b8\5\6\4\2\u00b6\u00b8\5\n\6\2\u00b7"+ - "\u00b5\3\2\2\2\u00b7\u00b6\3\2\2\2\u00b8\27\3\2\2\2\u00b9\u00bb\7I\2\2"+ - "\u00ba\u00bc\5\32\16\2\u00bb\u00ba\3\2\2\2\u00bb\u00bc\3\2\2\2\u00bc\31"+ - "\3\2\2\2\u00bd\u00be\7$\2\2\u00be\u00c3\5\30\r\2\u00bf\u00c0\7\f\2\2\u00c0"+ - "\u00c2\5\30\r\2\u00c1\u00bf\3\2\2\2\u00c2\u00c5\3\2\2\2\u00c3\u00c1\3"+ - "\2\2\2\u00c3\u00c4\3\2\2\2\u00c4\u00c6\3\2\2\2\u00c5\u00c3\3\2\2\2\u00c6"+ - "\u00c7\7&\2\2\u00c7\33\3\2\2\2\u00c8\u00c9\b\17\1\2\u00c9\u00ca\t\3\2"+ - "\2\u00ca\u00e3\5\34\17\20\u00cb\u00cc\7\t\2\2\u00cc\u00cd\5\22\n\2\u00cd"+ - "\u00ce\7\n\2\2\u00ce\u00cf\5\34\17\17\u00cf\u00e3\3\2\2\2\u00d0\u00d1"+ - "\5\36\20\2\u00d1\u00d2\t\4\2\2\u00d2\u00d3\5\34\17\3\u00d3\u00e3\3\2\2"+ - "\2\u00d4\u00d5\7\t\2\2\u00d5\u00d6\5\34\17\2\u00d6\u00d7\7\n\2\2\u00d7"+ - "\u00e3\3\2\2\2\u00d8\u00e3\t\5\2\2\u00d9\u00e3\7F\2\2\u00da\u00e3\7G\2"+ - "\2\u00db\u00e3\7H\2\2\u00dc\u00dd\5\36\20\2\u00dd\u00de\t\6\2\2\u00de"+ - "\u00e3\3\2\2\2\u00df\u00e0\t\6\2\2\u00e0\u00e3\5\36\20\2\u00e1\u00e3\5"+ - 
"\36\20\2\u00e2\u00c8\3\2\2\2\u00e2\u00cb\3\2\2\2\u00e2\u00d0\3\2\2\2\u00e2"+ - "\u00d4\3\2\2\2\u00e2\u00d8\3\2\2\2\u00e2\u00d9\3\2\2\2\u00e2\u00da\3\2"+ - "\2\2\u00e2\u00db\3\2\2\2\u00e2\u00dc\3\2\2\2\u00e2\u00df\3\2\2\2\u00e2"+ - "\u00e1\3\2\2\2\u00e3\u010a\3\2\2\2\u00e4\u00e5\f\16\2\2\u00e5\u00e6\t"+ - "\7\2\2\u00e6\u0109\5\34\17\17\u00e7\u00e8\f\r\2\2\u00e8\u00e9\t\b\2\2"+ - "\u00e9\u0109\5\34\17\16\u00ea\u00eb\f\f\2\2\u00eb\u00ec\t\t\2\2\u00ec"+ - "\u0109\5\34\17\r\u00ed\u00ee\f\13\2\2\u00ee\u00ef\t\n\2\2\u00ef\u0109"+ - "\5\34\17\f\u00f0\u00f1\f\n\2\2\u00f1\u00f2\t\13\2\2\u00f2\u0109\5\34\17"+ - "\13\u00f3\u00f4\f\t\2\2\u00f4\u00f5\7,\2\2\u00f5\u0109\5\34\17\n\u00f6"+ - "\u00f7\f\b\2\2\u00f7\u00f8\7-\2\2\u00f8\u0109\5\34\17\t\u00f9\u00fa\f"+ - "\7\2\2\u00fa\u00fb\7.\2\2\u00fb\u0109\5\34\17\b\u00fc\u00fd\f\6\2\2\u00fd"+ - "\u00fe\7/\2\2\u00fe\u0109\5\34\17\7\u00ff\u0100\f\5\2\2\u0100\u0101\7"+ - "\60\2\2\u0101\u0109\5\34\17\6\u0102\u0103\f\4\2\2\u0103\u0104\7\61\2\2"+ - "\u0104\u0105\5\34\17\2\u0105\u0106\7\62\2\2\u0106\u0107\5\34\17\4\u0107"+ - "\u0109\3\2\2\2\u0108\u00e4\3\2\2\2\u0108\u00e7\3\2\2\2\u0108\u00ea\3\2"+ - "\2\2\u0108\u00ed\3\2\2\2\u0108\u00f0\3\2\2\2\u0108\u00f3\3\2\2\2\u0108"+ - "\u00f6\3\2\2\2\u0108\u00f9\3\2\2\2\u0108\u00fc\3\2\2\2\u0108\u00ff\3\2"+ - "\2\2\u0108\u0102\3\2\2\2\u0109\u010c\3\2\2\2\u010a\u0108\3\2\2\2\u010a"+ - "\u010b\3\2\2\2\u010b\35\3\2\2\2\u010c\u010a\3\2\2\2\u010d\u0113\5 \21"+ - "\2\u010e\u0113\5\"\22\2\u010f\u0113\5*\26\2\u0110\u0113\5.\30\2\u0111"+ - "\u0113\5\60\31\2\u0112\u010d\3\2\2\2\u0112\u010e\3\2\2\2\u0112\u010f\3"+ - "\2\2\2\u0112\u0110\3\2\2\2\u0112\u0111\3\2\2\2\u0113\37\3\2\2\2\u0114"+ - "\u011a\7\t\2\2\u0115\u011b\5 \21\2\u0116\u011b\5\"\22\2\u0117\u011b\5"+ - "*\26\2\u0118\u011b\5.\30\2\u0119\u011b\5\60\31\2\u011a\u0115\3\2\2\2\u011a"+ - "\u0116\3\2\2\2\u011a\u0117\3\2\2\2\u011a\u0118\3\2\2\2\u011a\u0119\3\2"+ - "\2\2\u011b\u011c\3\2\2\2\u011c\u011f\7\n\2\2\u011d\u0120\5&\24\2\u011e"+ - "\u0120\5$\23\2\u011f\u011d\3\2\2\2\u011f\u011e\3\2\2\2\u011f\u0120\3\2"+ - "\2\2\u0120!\3\2\2\2\u0121\u0122\7\t\2\2\u0122\u0123\5\22\n\2\u0123\u0129"+ - "\7\n\2\2\u0124\u012a\5 \21\2\u0125\u012a\5\"\22\2\u0126\u012a\5*\26\2"+ - "\u0127\u012a\5.\30\2\u0128\u012a\5\60\31\2\u0129\u0124\3\2\2\2\u0129\u0125"+ - "\3\2\2\2\u0129\u0126\3\2\2\2\u0129\u0127\3\2\2\2\u0129\u0128\3\2\2\2\u012a"+ - "#\3\2\2\2\u012b\u012c\7\7\2\2\u012c\u012d\5\34\17\2\u012d\u0130\7\b\2"+ - "\2\u012e\u0131\5&\24\2\u012f\u0131\5$\23\2\u0130\u012e\3\2\2\2\u0130\u012f"+ - "\3\2\2\2\u0130\u0131\3\2\2\2\u0131%\3\2\2\2\u0132\u0135\7\13\2\2\u0133"+ - "\u0136\5(\25\2\u0134\u0136\5,\27\2\u0135\u0133\3\2\2\2\u0135\u0134\3\2"+ - "\2\2\u0136\'\3\2\2\2\u0137\u0138\7K\2\2\u0138\u013b\5\62\32\2\u0139\u013c"+ - "\5&\24\2\u013a\u013c\5$\23\2\u013b\u0139\3\2\2\2\u013b\u013a\3\2\2\2\u013b"+ - "\u013c\3\2\2\2\u013c)\3\2\2\2\u013d\u0140\5\30\r\2\u013e\u0141\5&\24\2"+ - "\u013f\u0141\5$\23\2\u0140\u013e\3\2\2\2\u0140\u013f\3\2\2\2\u0140\u0141"+ - "\3\2\2\2\u0141+\3\2\2\2\u0142\u0145\t\f\2\2\u0143\u0146\5&\24\2\u0144"+ - "\u0146\5$\23\2\u0145\u0143\3\2\2\2\u0145\u0144\3\2\2\2\u0145\u0146\3\2"+ - "\2\2\u0146-\3\2\2\2\u0147\u0148\7\26\2\2\u0148\u0158\5\30\r\2\u0149\u014b"+ - "\5\62\32\2\u014a\u014c\5&\24\2\u014b\u014a\3\2\2\2\u014b\u014c\3\2\2\2"+ - "\u014c\u0159\3\2\2\2\u014d\u014e\7\7\2\2\u014e\u014f\5\34\17\2\u014f\u0150"+ - "\7\b\2\2\u0150\u0152\3\2\2\2\u0151\u014d\3\2\2\2\u0152\u0153\3\2\2\2\u0153"+ - "\u0151\3\2\2\2\u0153\u0154\3\2\2\2\u0154\u0156\3\2\2\2\u0155\u0157\5&"+ - 
"\24\2\u0156\u0155\3\2\2\2\u0156\u0157\3\2\2\2\u0157\u0159\3\2\2\2\u0158"+ - "\u0149\3\2\2\2\u0158\u0151\3\2\2\2\u0159/\3\2\2\2\u015a\u015d\7E\2\2\u015b"+ - "\u015e\5&\24\2\u015c\u015e\5$\23\2\u015d\u015b\3\2\2\2\u015d\u015c\3\2"+ - "\2\2\u015d\u015e\3\2\2\2\u015e\61\3\2\2\2\u015f\u0168\7\t\2\2\u0160\u0165"+ - "\5\34\17\2\u0161\u0162\7\f\2\2\u0162\u0164\5\34\17\2\u0163\u0161\3\2\2"+ - "\2\u0164\u0167\3\2\2\2\u0165\u0163\3\2\2\2\u0165\u0166\3\2\2\2\u0166\u0169"+ - "\3\2\2\2\u0167\u0165\3\2\2\2\u0168\u0160\3\2\2\2\u0168\u0169\3\2\2\2\u0169"+ - "\u016a\3\2\2\2\u016a\u016b\7\n\2\2\u016b\63\3\2\2\2(\67BJW[_dv\177\u0085"+ - "\u008a\u008e\u0095\u009f\u00a7\u00ad\u00b7\u00bb\u00c3\u00e2\u0108\u010a"+ - "\u0112\u011a\u011f\u0129\u0130\u0135\u013b\u0140\u0145\u014b\u0153\u0156"+ - "\u0158\u015d\u0165\u0168"; + "\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\17\5\17\u0114\n\17\3\20\3\20\7\20"+ + "\u0118\n\20\f\20\16\20\u011b\13\20\3\20\3\20\3\20\7\20\u0120\n\20\f\20"+ + "\16\20\u0123\13\20\3\20\3\20\3\20\3\20\3\20\3\20\6\20\u012b\n\20\r\20"+ + "\16\20\u012c\3\20\3\20\7\20\u0131\n\20\f\20\16\20\u0134\13\20\5\20\u0136"+ + "\n\20\5\20\u0138\n\20\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21"+ + "\3\21\3\21\3\21\3\21\3\21\3\21\5\21\u014a\n\21\3\22\3\22\3\22\3\22\5\22"+ + "\u0150\n\22\3\23\3\23\3\23\3\23\3\23\5\23\u0157\n\23\3\24\3\24\3\24\3"+ + "\24\3\25\3\25\3\25\3\25\7\25\u0161\n\25\f\25\16\25\u0164\13\25\5\25\u0166"+ + "\n\25\3\25\3\25\3\25\2\3\32\26\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36"+ + " \"$&(\2\r\3\3\r\r\3\2\65@\3\2\34\36\3\2\37 \3\2!#\3\2$\'\3\2(+\3\2\63"+ + "\64\3\2AD\4\2\32\33\37 \3\2KL\u0190\2-\3\2\2\2\4w\3\2\2\2\6{\3\2\2\2\b"+ + "}\3\2\2\2\n\u0086\3\2\2\2\f\u008a\3\2\2\2\16\u008c\3\2\2\2\20\u008e\3"+ + "\2\2\2\22\u0097\3\2\2\2\24\u009f\3\2\2\2\26\u00a4\3\2\2\2\30\u00ab\3\2"+ + "\2\2\32\u00b6\3\2\2\2\34\u0113\3\2\2\2\36\u0137\3\2\2\2 \u0149\3\2\2\2"+ + "\"\u014f\3\2\2\2$\u0156\3\2\2\2&\u0158\3\2\2\2(\u015c\3\2\2\2*,\5\4\3"+ + "\2+*\3\2\2\2,/\3\2\2\2-+\3\2\2\2-.\3\2\2\2.\60\3\2\2\2/-\3\2\2\2\60\61"+ + "\7\2\2\3\61\3\3\2\2\2\62\63\7\16\2\2\63\64\7\t\2\2\64\65\5\32\16\2\65"+ + "\66\7\n\2\2\66:\5\6\4\2\678\7\17\2\28;\5\6\4\29;\6\3\2\2:\67\3\2\2\2:"+ + "9\3\2\2\2;x\3\2\2\2<=\7\20\2\2=>\7\t\2\2>?\5\32\16\2?B\7\n\2\2@C\5\6\4"+ + "\2AC\5\n\6\2B@\3\2\2\2BA\3\2\2\2Cx\3\2\2\2DE\7\21\2\2EF\5\b\5\2FG\7\20"+ + "\2\2GH\7\t\2\2HI\5\32\16\2IJ\7\n\2\2JK\5\30\r\2Kx\3\2\2\2LM\7\22\2\2M"+ + "O\7\t\2\2NP\5\f\7\2ON\3\2\2\2OP\3\2\2\2PQ\3\2\2\2QS\7\r\2\2RT\5\32\16"+ + "\2SR\3\2\2\2ST\3\2\2\2TU\3\2\2\2UW\7\r\2\2VX\5\16\b\2WV\3\2\2\2WX\3\2"+ + "\2\2XY\3\2\2\2Y\\\7\n\2\2Z]\5\6\4\2[]\5\n\6\2\\Z\3\2\2\2\\[\3\2\2\2]x"+ + "\3\2\2\2^_\5\20\t\2_`\5\30\r\2`x\3\2\2\2ab\7\23\2\2bx\5\30\r\2cd\7\24"+ + "\2\2dx\5\30\r\2ef\7\25\2\2fg\5\32\16\2gh\5\30\r\2hx\3\2\2\2ij\7\27\2\2"+ + "jl\5\b\5\2km\5\26\f\2lk\3\2\2\2mn\3\2\2\2nl\3\2\2\2no\3\2\2\2ox\3\2\2"+ + "\2pq\7\31\2\2qr\5\32\16\2rs\5\30\r\2sx\3\2\2\2tu\5\32\16\2uv\5\30\r\2"+ + "vx\3\2\2\2w\62\3\2\2\2w<\3\2\2\2wD\3\2\2\2wL\3\2\2\2w^\3\2\2\2wa\3\2\2"+ + "\2wc\3\2\2\2we\3\2\2\2wi\3\2\2\2wp\3\2\2\2wt\3\2\2\2x\5\3\2\2\2y|\5\b"+ + "\5\2z|\5\4\3\2{y\3\2\2\2{z\3\2\2\2|\7\3\2\2\2}\u0081\7\5\2\2~\u0080\5"+ + "\4\3\2\177~\3\2\2\2\u0080\u0083\3\2\2\2\u0081\177\3\2\2\2\u0081\u0082"+ + "\3\2\2\2\u0082\u0084\3\2\2\2\u0083\u0081\3\2\2\2\u0084\u0085\7\6\2\2\u0085"+ + "\t\3\2\2\2\u0086\u0087\7\r\2\2\u0087\13\3\2\2\2\u0088\u008b\5\20\t\2\u0089"+ + "\u008b\5\32\16\2\u008a\u0088\3\2\2\2\u008a\u0089\3\2\2\2\u008b\r\3\2\2"+ + "\2\u008c\u008d\5\32\16\2\u008d\17\3\2\2\2\u008e\u008f\5\22\n\2\u008f\u0094"+ + 
"\5\24\13\2\u0090\u0091\7\f\2\2\u0091\u0093\5\24\13\2\u0092\u0090\3\2\2"+ + "\2\u0093\u0096\3\2\2\2\u0094\u0092\3\2\2\2\u0094\u0095\3\2\2\2\u0095\21"+ + "\3\2\2\2\u0096\u0094\3\2\2\2\u0097\u009c\7I\2\2\u0098\u0099\7\7\2\2\u0099"+ + "\u009b\7\b\2\2\u009a\u0098\3\2\2\2\u009b\u009e\3\2\2\2\u009c\u009a\3\2"+ + "\2\2\u009c\u009d\3\2\2\2\u009d\23\3\2\2\2\u009e\u009c\3\2\2\2\u009f\u00a2"+ + "\7J\2\2\u00a0\u00a1\7\65\2\2\u00a1\u00a3\5\32\16\2\u00a2\u00a0\3\2\2\2"+ + "\u00a2\u00a3\3\2\2\2\u00a3\25\3\2\2\2\u00a4\u00a5\7\30\2\2\u00a5\u00a6"+ + "\7\t\2\2\u00a6\u00a7\7I\2\2\u00a7\u00a8\7J\2\2\u00a8\u00a9\7\n\2\2\u00a9"+ + "\u00aa\5\b\5\2\u00aa\27\3\2\2\2\u00ab\u00ac\t\2\2\2\u00ac\31\3\2\2\2\u00ad"+ + "\u00ae\b\16\1\2\u00ae\u00af\5\36\20\2\u00af\u00b0\t\3\2\2\u00b0\u00b1"+ + "\5\32\16\3\u00b1\u00b2\b\16\1\2\u00b2\u00b7\3\2\2\2\u00b3\u00b4\5\34\17"+ + "\2\u00b4\u00b5\b\16\1\2\u00b5\u00b7\3\2\2\2\u00b6\u00ad\3\2\2\2\u00b6"+ + "\u00b3\3\2\2\2\u00b7\u00f3\3\2\2\2\u00b8\u00b9\f\16\2\2\u00b9\u00ba\t"+ + "\4\2\2\u00ba\u00bb\5\32\16\17\u00bb\u00bc\b\16\1\2\u00bc\u00f2\3\2\2\2"+ + "\u00bd\u00be\f\r\2\2\u00be\u00bf\t\5\2\2\u00bf\u00c0\5\32\16\16\u00c0"+ + "\u00c1\b\16\1\2\u00c1\u00f2\3\2\2\2\u00c2\u00c3\f\f\2\2\u00c3\u00c4\t"+ + "\6\2\2\u00c4\u00c5\5\32\16\r\u00c5\u00c6\b\16\1\2\u00c6\u00f2\3\2\2\2"+ + "\u00c7\u00c8\f\13\2\2\u00c8\u00c9\t\7\2\2\u00c9\u00ca\5\32\16\f\u00ca"+ + "\u00cb\b\16\1\2\u00cb\u00f2\3\2\2\2\u00cc\u00cd\f\n\2\2\u00cd\u00ce\t"+ + "\b\2\2\u00ce\u00cf\5\32\16\13\u00cf\u00d0\b\16\1\2\u00d0\u00f2\3\2\2\2"+ + "\u00d1\u00d2\f\t\2\2\u00d2\u00d3\7,\2\2\u00d3\u00d4\5\32\16\n\u00d4\u00d5"+ + "\b\16\1\2\u00d5\u00f2\3\2\2\2\u00d6\u00d7\f\b\2\2\u00d7\u00d8\7-\2\2\u00d8"+ + "\u00d9\5\32\16\t\u00d9\u00da\b\16\1\2\u00da\u00f2\3\2\2\2\u00db\u00dc"+ + "\f\7\2\2\u00dc\u00dd\7.\2\2\u00dd\u00de\5\32\16\b\u00de\u00df\b\16\1\2"+ + "\u00df\u00f2\3\2\2\2\u00e0\u00e1\f\6\2\2\u00e1\u00e2\7/\2\2\u00e2\u00e3"+ + "\5\32\16\7\u00e3\u00e4\b\16\1\2\u00e4\u00f2\3\2\2\2\u00e5\u00e6\f\5\2"+ + "\2\u00e6\u00e7\7\60\2\2\u00e7\u00e8\5\32\16\6\u00e8\u00e9\b\16\1\2\u00e9"+ + "\u00f2\3\2\2\2\u00ea\u00eb\f\4\2\2\u00eb\u00ec\7\61\2\2\u00ec\u00ed\5"+ + "\32\16\2\u00ed\u00ee\7\62\2\2\u00ee\u00ef\5\32\16\4\u00ef\u00f0\b\16\1"+ + "\2\u00f0\u00f2\3\2\2\2\u00f1\u00b8\3\2\2\2\u00f1\u00bd\3\2\2\2\u00f1\u00c2"+ + "\3\2\2\2\u00f1\u00c7\3\2\2\2\u00f1\u00cc\3\2\2\2\u00f1\u00d1\3\2\2\2\u00f1"+ + "\u00d6\3\2\2\2\u00f1\u00db\3\2\2\2\u00f1\u00e0\3\2\2\2\u00f1\u00e5\3\2"+ + "\2\2\u00f1\u00ea\3\2\2\2\u00f2\u00f5\3\2\2\2\u00f3\u00f1\3\2\2\2\u00f3"+ + "\u00f4\3\2\2\2\u00f4\33\3\2\2\2\u00f5\u00f3\3\2\2\2\u00f6\u00f7\6\17\16"+ + "\3\u00f7\u00f8\t\t\2\2\u00f8\u0114\5\36\20\2\u00f9\u00fa\6\17\17\3\u00fa"+ + "\u00fb\5\36\20\2\u00fb\u00fc\t\t\2\2\u00fc\u0114\3\2\2\2\u00fd\u00fe\6"+ + "\17\20\3\u00fe\u0114\5\36\20\2\u00ff\u0100\6\17\21\3\u0100\u0101\t\n\2"+ + "\2\u0101\u0114\b\17\1\2\u0102\u0103\6\17\22\3\u0103\u0104\7F\2\2\u0104"+ + "\u0114\b\17\1\2\u0105\u0106\6\17\23\3\u0106\u0107\7G\2\2\u0107\u0114\b"+ + "\17\1\2\u0108\u0109\6\17\24\3\u0109\u010a\7H\2\2\u010a\u0114\b\17\1\2"+ + "\u010b\u010c\6\17\25\3\u010c\u010d\t\13\2\2\u010d\u0114\5\34\17\2\u010e"+ + "\u010f\7\t\2\2\u010f\u0110\5\22\n\2\u0110\u0111\7\n\2\2\u0111\u0112\5"+ + "\34\17\2\u0112\u0114\3\2\2\2\u0113\u00f6\3\2\2\2\u0113\u00f9\3\2\2\2\u0113"+ + "\u00fd\3\2\2\2\u0113\u00ff\3\2\2\2\u0113\u0102\3\2\2\2\u0113\u0105\3\2"+ + "\2\2\u0113\u0108\3\2\2\2\u0113\u010b\3\2\2\2\u0113\u010e\3\2\2\2\u0114"+ + "\35\3\2\2\2\u0115\u0119\5 \21\2\u0116\u0118\5\"\22\2\u0117\u0116\3\2\2"+ + 
"\2\u0118\u011b\3\2\2\2\u0119\u0117\3\2\2\2\u0119\u011a\3\2\2\2\u011a\u0138"+ + "\3\2\2\2\u011b\u0119\3\2\2\2\u011c\u011d\5\22\n\2\u011d\u0121\5$\23\2"+ + "\u011e\u0120\5\"\22\2\u011f\u011e\3\2\2\2\u0120\u0123\3\2\2\2\u0121\u011f"+ + "\3\2\2\2\u0121\u0122\3\2\2\2\u0122\u0138\3\2\2\2\u0123\u0121\3\2\2\2\u0124"+ + "\u0125\7\26\2\2\u0125\u012a\7I\2\2\u0126\u0127\7\7\2\2\u0127\u0128\5\32"+ + "\16\2\u0128\u0129\7\b\2\2\u0129\u012b\3\2\2\2\u012a\u0126\3\2\2\2\u012b"+ + "\u012c\3\2\2\2\u012c\u012a\3\2\2\2\u012c\u012d\3\2\2\2\u012d\u0135\3\2"+ + "\2\2\u012e\u0132\5$\23\2\u012f\u0131\5\"\22\2\u0130\u012f\3\2\2\2\u0131"+ + "\u0134\3\2\2\2\u0132\u0130\3\2\2\2\u0132\u0133\3\2\2\2\u0133\u0136\3\2"+ + "\2\2\u0134\u0132\3\2\2\2\u0135\u012e\3\2\2\2\u0135\u0136\3\2\2\2\u0136"+ + "\u0138\3\2\2\2\u0137\u0115\3\2\2\2\u0137\u011c\3\2\2\2\u0137\u0124\3\2"+ + "\2\2\u0138\37\3\2\2\2\u0139\u013a\6\21\26\3\u013a\u013b\7\t\2\2\u013b"+ + "\u013c\5\32\16\2\u013c\u013d\7\n\2\2\u013d\u013e\b\21\1\2\u013e\u014a"+ + "\3\2\2\2\u013f\u0140\6\21\27\3\u0140\u0141\7\t\2\2\u0141\u0142\5\34\17"+ + "\2\u0142\u0143\7\n\2\2\u0143\u014a\3\2\2\2\u0144\u014a\7E\2\2\u0145\u014a"+ + "\7J\2\2\u0146\u0147\7\26\2\2\u0147\u0148\7I\2\2\u0148\u014a\5(\25\2\u0149"+ + "\u0139\3\2\2\2\u0149\u013f\3\2\2\2\u0149\u0144\3\2\2\2\u0149\u0145\3\2"+ + "\2\2\u0149\u0146\3\2\2\2\u014a!\3\2\2\2\u014b\u014c\6\22\30\3\u014c\u0150"+ + "\5$\23\2\u014d\u014e\6\22\31\3\u014e\u0150\5&\24\2\u014f\u014b\3\2\2\2"+ + "\u014f\u014d\3\2\2\2\u0150#\3\2\2\2\u0151\u0152\7\13\2\2\u0152\u0153\7"+ + "L\2\2\u0153\u0157\5(\25\2\u0154\u0155\7\13\2\2\u0155\u0157\t\f\2\2\u0156"+ + "\u0151\3\2\2\2\u0156\u0154\3\2\2\2\u0157%\3\2\2\2\u0158\u0159\7\7\2\2"+ + "\u0159\u015a\5\32\16\2\u015a\u015b\7\b\2\2\u015b\'\3\2\2\2\u015c\u0165"+ + "\7\t\2\2\u015d\u0162\5\32\16\2\u015e\u015f\7\f\2\2\u015f\u0161\5\32\16"+ + "\2\u0160\u015e\3\2\2\2\u0161\u0164\3\2\2\2\u0162\u0160\3\2\2\2\u0162\u0163"+ + "\3\2\2\2\u0163\u0166\3\2\2\2\u0164\u0162\3\2\2\2\u0165\u015d\3\2\2\2\u0165"+ + "\u0166\3\2\2\2\u0166\u0167\3\2\2\2\u0167\u0168\7\n\2\2\u0168)\3\2\2\2"+ + " -:BOSW\\nw{\u0081\u008a\u0094\u009c\u00a2\u00b6\u00f1\u00f3\u0113\u0119"+ + "\u0121\u012c\u0132\u0135\u0137\u0149\u014f\u0156\u0162\u0165"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserBaseVisitor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserBaseVisitor.java index dad6e419e25..7d5ed4c9e6f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserBaseVisitor.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserBaseVisitor.java @@ -101,14 +101,14 @@ class PainlessParserBaseVisitor extends AbstractParseTreeVisitor implement *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
*/ - @Override public T visitMultiple(PainlessParser.MultipleContext ctx) { return visitChildren(ctx); } + @Override public T visitTrailer(PainlessParser.TrailerContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
*/ - @Override public T visitSingle(PainlessParser.SingleContext ctx) { return visitChildren(ctx); } + @Override public T visitBlock(PainlessParser.BlockContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -116,13 +116,6 @@ class PainlessParserBaseVisitor extends AbstractParseTreeVisitor implement * {@link #visitChildren} on {@code ctx}.
</p>
*/ @Override public T visitEmpty(PainlessParser.EmptyContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *
<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitEmptyscope(PainlessParser.EmptyscopeContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -171,14 +164,14 @@ class PainlessParserBaseVisitor extends AbstractParseTreeVisitor implement *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
*/ - @Override public T visitIdentifier(PainlessParser.IdentifierContext ctx) { return visitChildren(ctx); } + @Override public T visitDelimiter(PainlessParser.DelimiterContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
*/ - @Override public T visitGeneric(PainlessParser.GenericContext ctx) { return visitChildren(ctx); } + @Override public T visitSingle(PainlessParser.SingleContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -186,13 +179,6 @@ class PainlessParserBaseVisitor extends AbstractParseTreeVisitor implement * {@link #visitChildren} on {@code ctx}.
</p>
*/ @Override public T visitComp(PainlessParser.CompContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *
<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitRead(PainlessParser.ReadContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -220,7 +206,28 @@ class PainlessParserBaseVisitor extends AbstractParseTreeVisitor implement *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
*/ - @Override public T visitFalse(PainlessParser.FalseContext ctx) { return visitChildren(ctx); } + @Override public T visitBinary(PainlessParser.BinaryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitPre(PainlessParser.PreContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitPost(PainlessParser.PostContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitRead(PainlessParser.ReadContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -234,35 +241,14 @@ class PainlessParserBaseVisitor extends AbstractParseTreeVisitor implement *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
*/ - @Override public T visitUnary(PainlessParser.UnaryContext ctx) { return visitChildren(ctx); } + @Override public T visitTrue(PainlessParser.TrueContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
*/ - @Override public T visitPrecedence(PainlessParser.PrecedenceContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *
<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitPreinc(PainlessParser.PreincContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *
<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitPostinc(PainlessParser.PostincContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *
<p>The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.</p>
- */ - @Override public T visitCast(PainlessParser.CastContext ctx) { return visitChildren(ctx); } + @Override public T visitFalse(PainlessParser.FalseContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -276,84 +262,98 @@ class PainlessParserBaseVisitor extends AbstractParseTreeVisitor implement *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
*/ - @Override public T visitBinary(PainlessParser.BinaryContext ctx) { return visitChildren(ctx); } + @Override public T visitOperator(PainlessParser.OperatorContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
*/ - @Override public T visitTrue(PainlessParser.TrueContext ctx) { return visitChildren(ctx); } + @Override public T visitCast(PainlessParser.CastContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
*/ - @Override public T visitChain(PainlessParser.ChainContext ctx) { return visitChildren(ctx); } + @Override public T visitDynamic(PainlessParser.DynamicContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
*/ - @Override public T visitLinkprec(PainlessParser.LinkprecContext ctx) { return visitChildren(ctx); } + @Override public T visitStatic(PainlessParser.StaticContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
*/ - @Override public T visitLinkcast(PainlessParser.LinkcastContext ctx) { return visitChildren(ctx); } + @Override public T visitNewarray(PainlessParser.NewarrayContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
*/ - @Override public T visitLinkbrace(PainlessParser.LinkbraceContext ctx) { return visitChildren(ctx); } + @Override public T visitExprprec(PainlessParser.ExprprecContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
*/ - @Override public T visitLinkdot(PainlessParser.LinkdotContext ctx) { return visitChildren(ctx); } + @Override public T visitChainprec(PainlessParser.ChainprecContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
*/ - @Override public T visitLinkcall(PainlessParser.LinkcallContext ctx) { return visitChildren(ctx); } + @Override public T visitString(PainlessParser.StringContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
*/ - @Override public T visitLinkvar(PainlessParser.LinkvarContext ctx) { return visitChildren(ctx); } + @Override public T visitVariable(PainlessParser.VariableContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
*/ - @Override public T visitLinkfield(PainlessParser.LinkfieldContext ctx) { return visitChildren(ctx); } + @Override public T visitNewobject(PainlessParser.NewobjectContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
*/ - @Override public T visitLinknew(PainlessParser.LinknewContext ctx) { return visitChildren(ctx); } + @Override public T visitSecondary(PainlessParser.SecondaryContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *
<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
*/ - @Override public T visitLinkstring(PainlessParser.LinkstringContext ctx) { return visitChildren(ctx); } + @Override public T visitCallinvoke(PainlessParser.CallinvokeContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitFieldaccess(PainlessParser.FieldaccessContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitBraceaccess(PainlessParser.BraceaccessContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserVisitor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserVisitor.java index 7a3b594ab15..ab1254e1a91 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserVisitor.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserVisitor.java @@ -94,31 +94,23 @@ interface PainlessParserVisitor extends ParseTreeVisitor { */ T visitExpr(PainlessParser.ExprContext ctx); /** - * Visit a parse tree produced by the {@code multiple} - * labeled alternative in {@link PainlessParser#block}. + * Visit a parse tree produced by {@link PainlessParser#trailer}. * @param ctx the parse tree * @return the visitor result */ - T visitMultiple(PainlessParser.MultipleContext ctx); + T visitTrailer(PainlessParser.TrailerContext ctx); /** - * Visit a parse tree produced by the {@code single} - * labeled alternative in {@link PainlessParser#block}. + * Visit a parse tree produced by {@link PainlessParser#block}. * @param ctx the parse tree * @return the visitor result */ - T visitSingle(PainlessParser.SingleContext ctx); + T visitBlock(PainlessParser.BlockContext ctx); /** * Visit a parse tree produced by {@link PainlessParser#empty}. * @param ctx the parse tree * @return the visitor result */ T visitEmpty(PainlessParser.EmptyContext ctx); - /** - * Visit a parse tree produced by {@link PainlessParser#emptyscope}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitEmptyscope(PainlessParser.EmptyscopeContext ctx); /** * Visit a parse tree produced by {@link PainlessParser#initializer}. * @param ctx the parse tree @@ -156,17 +148,18 @@ interface PainlessParserVisitor extends ParseTreeVisitor { */ T visitTrap(PainlessParser.TrapContext ctx); /** - * Visit a parse tree produced by {@link PainlessParser#identifier}. + * Visit a parse tree produced by {@link PainlessParser#delimiter}. * @param ctx the parse tree * @return the visitor result */ - T visitIdentifier(PainlessParser.IdentifierContext ctx); + T visitDelimiter(PainlessParser.DelimiterContext ctx); /** - * Visit a parse tree produced by {@link PainlessParser#generic}. + * Visit a parse tree produced by the {@code single} + * labeled alternative in {@link PainlessParser#expression}. * @param ctx the parse tree * @return the visitor result */ - T visitGeneric(PainlessParser.GenericContext ctx); + T visitSingle(PainlessParser.SingleContext ctx); /** * Visit a parse tree produced by the {@code comp} * labeled alternative in {@link PainlessParser#expression}. @@ -174,13 +167,6 @@ interface PainlessParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitComp(PainlessParser.CompContext ctx); - /** - * Visit a parse tree produced by the {@code read} - * labeled alternative in {@link PainlessParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitRead(PainlessParser.ReadContext ctx); /** * Visit a parse tree produced by the {@code bool} * labeled alternative in {@link PainlessParser#expression}. 
@@ -202,62 +188,6 @@ interface PainlessParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitAssignment(PainlessParser.AssignmentContext ctx); - /** - * Visit a parse tree produced by the {@code false} - * labeled alternative in {@link PainlessParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitFalse(PainlessParser.FalseContext ctx); - /** - * Visit a parse tree produced by the {@code numeric} - * labeled alternative in {@link PainlessParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitNumeric(PainlessParser.NumericContext ctx); - /** - * Visit a parse tree produced by the {@code unary} - * labeled alternative in {@link PainlessParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitUnary(PainlessParser.UnaryContext ctx); - /** - * Visit a parse tree produced by the {@code precedence} - * labeled alternative in {@link PainlessParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitPrecedence(PainlessParser.PrecedenceContext ctx); - /** - * Visit a parse tree produced by the {@code preinc} - * labeled alternative in {@link PainlessParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitPreinc(PainlessParser.PreincContext ctx); - /** - * Visit a parse tree produced by the {@code postinc} - * labeled alternative in {@link PainlessParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitPostinc(PainlessParser.PostincContext ctx); - /** - * Visit a parse tree produced by the {@code cast} - * labeled alternative in {@link PainlessParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitCast(PainlessParser.CastContext ctx); - /** - * Visit a parse tree produced by the {@code null} - * labeled alternative in {@link PainlessParser#expression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitNull(PainlessParser.NullContext ctx); /** * Visit a parse tree produced by the {@code binary} * labeled alternative in {@link PainlessParser#expression}. @@ -265,73 +195,152 @@ interface PainlessParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitBinary(PainlessParser.BinaryContext ctx); + /** + * Visit a parse tree produced by the {@code pre} + * labeled alternative in {@link PainlessParser#unary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitPre(PainlessParser.PreContext ctx); + /** + * Visit a parse tree produced by the {@code post} + * labeled alternative in {@link PainlessParser#unary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitPost(PainlessParser.PostContext ctx); + /** + * Visit a parse tree produced by the {@code read} + * labeled alternative in {@link PainlessParser#unary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitRead(PainlessParser.ReadContext ctx); + /** + * Visit a parse tree produced by the {@code numeric} + * labeled alternative in {@link PainlessParser#unary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNumeric(PainlessParser.NumericContext ctx); /** * Visit a parse tree produced by the {@code true} - * labeled alternative in {@link PainlessParser#expression}. + * labeled alternative in {@link PainlessParser#unary}. 
* @param ctx the parse tree * @return the visitor result */ T visitTrue(PainlessParser.TrueContext ctx); /** - * Visit a parse tree produced by {@link PainlessParser#chain}. + * Visit a parse tree produced by the {@code false} + * labeled alternative in {@link PainlessParser#unary}. * @param ctx the parse tree * @return the visitor result */ - T visitChain(PainlessParser.ChainContext ctx); + T visitFalse(PainlessParser.FalseContext ctx); /** - * Visit a parse tree produced by {@link PainlessParser#linkprec}. + * Visit a parse tree produced by the {@code null} + * labeled alternative in {@link PainlessParser#unary}. * @param ctx the parse tree * @return the visitor result */ - T visitLinkprec(PainlessParser.LinkprecContext ctx); + T visitNull(PainlessParser.NullContext ctx); /** - * Visit a parse tree produced by {@link PainlessParser#linkcast}. + * Visit a parse tree produced by the {@code operator} + * labeled alternative in {@link PainlessParser#unary}. * @param ctx the parse tree * @return the visitor result */ - T visitLinkcast(PainlessParser.LinkcastContext ctx); + T visitOperator(PainlessParser.OperatorContext ctx); /** - * Visit a parse tree produced by {@link PainlessParser#linkbrace}. + * Visit a parse tree produced by the {@code cast} + * labeled alternative in {@link PainlessParser#unary}. * @param ctx the parse tree * @return the visitor result */ - T visitLinkbrace(PainlessParser.LinkbraceContext ctx); + T visitCast(PainlessParser.CastContext ctx); /** - * Visit a parse tree produced by {@link PainlessParser#linkdot}. + * Visit a parse tree produced by the {@code dynamic} + * labeled alternative in {@link PainlessParser#chain}. * @param ctx the parse tree * @return the visitor result */ - T visitLinkdot(PainlessParser.LinkdotContext ctx); + T visitDynamic(PainlessParser.DynamicContext ctx); /** - * Visit a parse tree produced by {@link PainlessParser#linkcall}. + * Visit a parse tree produced by the {@code static} + * labeled alternative in {@link PainlessParser#chain}. * @param ctx the parse tree * @return the visitor result */ - T visitLinkcall(PainlessParser.LinkcallContext ctx); + T visitStatic(PainlessParser.StaticContext ctx); /** - * Visit a parse tree produced by {@link PainlessParser#linkvar}. + * Visit a parse tree produced by the {@code newarray} + * labeled alternative in {@link PainlessParser#chain}. * @param ctx the parse tree * @return the visitor result */ - T visitLinkvar(PainlessParser.LinkvarContext ctx); + T visitNewarray(PainlessParser.NewarrayContext ctx); /** - * Visit a parse tree produced by {@link PainlessParser#linkfield}. + * Visit a parse tree produced by the {@code exprprec} + * labeled alternative in {@link PainlessParser#primary}. * @param ctx the parse tree * @return the visitor result */ - T visitLinkfield(PainlessParser.LinkfieldContext ctx); + T visitExprprec(PainlessParser.ExprprecContext ctx); /** - * Visit a parse tree produced by {@link PainlessParser#linknew}. + * Visit a parse tree produced by the {@code chainprec} + * labeled alternative in {@link PainlessParser#primary}. * @param ctx the parse tree * @return the visitor result */ - T visitLinknew(PainlessParser.LinknewContext ctx); + T visitChainprec(PainlessParser.ChainprecContext ctx); /** - * Visit a parse tree produced by {@link PainlessParser#linkstring}. + * Visit a parse tree produced by the {@code string} + * labeled alternative in {@link PainlessParser#primary}. 
* @param ctx the parse tree * @return the visitor result */ - T visitLinkstring(PainlessParser.LinkstringContext ctx); + T visitString(PainlessParser.StringContext ctx); + /** + * Visit a parse tree produced by the {@code variable} + * labeled alternative in {@link PainlessParser#primary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitVariable(PainlessParser.VariableContext ctx); + /** + * Visit a parse tree produced by the {@code newobject} + * labeled alternative in {@link PainlessParser#primary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitNewobject(PainlessParser.NewobjectContext ctx); + /** + * Visit a parse tree produced by {@link PainlessParser#secondary}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitSecondary(PainlessParser.SecondaryContext ctx); + /** + * Visit a parse tree produced by the {@code callinvoke} + * labeled alternative in {@link PainlessParser#dot}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitCallinvoke(PainlessParser.CallinvokeContext ctx); + /** + * Visit a parse tree produced by the {@code fieldaccess} + * labeled alternative in {@link PainlessParser#dot}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitFieldaccess(PainlessParser.FieldaccessContext ctx); + /** + * Visit a parse tree produced by the {@code braceaccess} + * labeled alternative in {@link PainlessParser#brace}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitBraceaccess(PainlessParser.BraceaccessContext ctx); /** * Visit a parse tree produced by {@link PainlessParser#arguments}. * @param ctx the parse tree diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java index 4f6e2f5e87c..3669ab24ecc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java @@ -20,59 +20,68 @@ package org.elasticsearch.painless.antlr; import org.antlr.v4.runtime.ANTLRInputStream; +import org.antlr.v4.runtime.BaseErrorListener; import org.antlr.v4.runtime.CommonTokenStream; +import org.antlr.v4.runtime.DiagnosticErrorListener; import org.antlr.v4.runtime.ParserRuleContext; +import org.antlr.v4.runtime.RecognitionException; +import org.antlr.v4.runtime.Recognizer; +import org.antlr.v4.runtime.atn.PredictionMode; +import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Operation; import org.elasticsearch.painless.Variables.Reserved; import org.elasticsearch.painless.antlr.PainlessParser.AfterthoughtContext; import org.elasticsearch.painless.antlr.PainlessParser.ArgumentsContext; import org.elasticsearch.painless.antlr.PainlessParser.AssignmentContext; import org.elasticsearch.painless.antlr.PainlessParser.BinaryContext; +import org.elasticsearch.painless.antlr.PainlessParser.BlockContext; +import org.elasticsearch.painless.antlr.PainlessParser.BoolContext; +import org.elasticsearch.painless.antlr.PainlessParser.BraceaccessContext; import org.elasticsearch.painless.antlr.PainlessParser.BreakContext; +import org.elasticsearch.painless.antlr.PainlessParser.CallinvokeContext; import org.elasticsearch.painless.antlr.PainlessParser.CastContext; +import org.elasticsearch.painless.antlr.PainlessParser.ChainprecContext; +import org.elasticsearch.painless.antlr.PainlessParser.CompContext; import 
org.elasticsearch.painless.antlr.PainlessParser.ConditionalContext; import org.elasticsearch.painless.antlr.PainlessParser.ContinueContext; import org.elasticsearch.painless.antlr.PainlessParser.DeclContext; import org.elasticsearch.painless.antlr.PainlessParser.DeclarationContext; import org.elasticsearch.painless.antlr.PainlessParser.DecltypeContext; import org.elasticsearch.painless.antlr.PainlessParser.DeclvarContext; +import org.elasticsearch.painless.antlr.PainlessParser.DelimiterContext; import org.elasticsearch.painless.antlr.PainlessParser.DoContext; +import org.elasticsearch.painless.antlr.PainlessParser.DynamicContext; import org.elasticsearch.painless.antlr.PainlessParser.EmptyContext; -import org.elasticsearch.painless.antlr.PainlessParser.EmptyscopeContext; import org.elasticsearch.painless.antlr.PainlessParser.ExprContext; import org.elasticsearch.painless.antlr.PainlessParser.ExpressionContext; -import org.elasticsearch.painless.antlr.PainlessParser.LinkbraceContext; -import org.elasticsearch.painless.antlr.PainlessParser.LinkcallContext; -import org.elasticsearch.painless.antlr.PainlessParser.LinkcastContext; -import org.elasticsearch.painless.antlr.PainlessParser.LinkdotContext; -import org.elasticsearch.painless.antlr.PainlessParser.ReadContext; -import org.elasticsearch.painless.antlr.PainlessParser.LinkfieldContext; -import org.elasticsearch.painless.antlr.PainlessParser.LinknewContext; -import org.elasticsearch.painless.antlr.PainlessParser.LinkprecContext; -import org.elasticsearch.painless.antlr.PainlessParser.ChainContext; -import org.elasticsearch.painless.antlr.PainlessParser.LinkstringContext; -import org.elasticsearch.painless.antlr.PainlessParser.LinkvarContext; +import org.elasticsearch.painless.antlr.PainlessParser.ExprprecContext; import org.elasticsearch.painless.antlr.PainlessParser.FalseContext; +import org.elasticsearch.painless.antlr.PainlessParser.FieldaccessContext; import org.elasticsearch.painless.antlr.PainlessParser.ForContext; -import org.elasticsearch.painless.antlr.PainlessParser.GenericContext; -import org.elasticsearch.painless.antlr.PainlessParser.IdentifierContext; import org.elasticsearch.painless.antlr.PainlessParser.IfContext; import org.elasticsearch.painless.antlr.PainlessParser.InitializerContext; -import org.elasticsearch.painless.antlr.PainlessParser.MultipleContext; +import org.elasticsearch.painless.antlr.PainlessParser.NewarrayContext; +import org.elasticsearch.painless.antlr.PainlessParser.NewobjectContext; import org.elasticsearch.painless.antlr.PainlessParser.NullContext; import org.elasticsearch.painless.antlr.PainlessParser.NumericContext; -import org.elasticsearch.painless.antlr.PainlessParser.PostincContext; -import org.elasticsearch.painless.antlr.PainlessParser.PrecedenceContext; -import org.elasticsearch.painless.antlr.PainlessParser.PreincContext; +import org.elasticsearch.painless.antlr.PainlessParser.OperatorContext; +import org.elasticsearch.painless.antlr.PainlessParser.PostContext; +import org.elasticsearch.painless.antlr.PainlessParser.PreContext; +import org.elasticsearch.painless.antlr.PainlessParser.ReadContext; import org.elasticsearch.painless.antlr.PainlessParser.ReturnContext; +import org.elasticsearch.painless.antlr.PainlessParser.SecondaryContext; import org.elasticsearch.painless.antlr.PainlessParser.SingleContext; import org.elasticsearch.painless.antlr.PainlessParser.SourceContext; import org.elasticsearch.painless.antlr.PainlessParser.StatementContext; +import 
org.elasticsearch.painless.antlr.PainlessParser.StaticContext; +import org.elasticsearch.painless.antlr.PainlessParser.StringContext; import org.elasticsearch.painless.antlr.PainlessParser.ThrowContext; +import org.elasticsearch.painless.antlr.PainlessParser.TrailerContext; import org.elasticsearch.painless.antlr.PainlessParser.TrapContext; import org.elasticsearch.painless.antlr.PainlessParser.TrueContext; import org.elasticsearch.painless.antlr.PainlessParser.TryContext; import org.elasticsearch.painless.antlr.PainlessParser.UnaryContext; +import org.elasticsearch.painless.antlr.PainlessParser.VariableContext; import org.elasticsearch.painless.antlr.PainlessParser.WhileContext; import org.elasticsearch.painless.node.AExpression; import org.elasticsearch.painless.node.ALink; @@ -81,11 +90,11 @@ import org.elasticsearch.painless.node.AStatement; import org.elasticsearch.painless.node.EBinary; import org.elasticsearch.painless.node.EBool; import org.elasticsearch.painless.node.EBoolean; -import org.elasticsearch.painless.node.EExplicit; import org.elasticsearch.painless.node.EChain; import org.elasticsearch.painless.node.EComp; import org.elasticsearch.painless.node.EConditional; import org.elasticsearch.painless.node.EDecimal; +import org.elasticsearch.painless.node.EExplicit; import org.elasticsearch.painless.node.ENull; import org.elasticsearch.painless.node.ENumeric; import org.elasticsearch.painless.node.EUnary; @@ -95,21 +104,23 @@ import org.elasticsearch.painless.node.LCast; import org.elasticsearch.painless.node.LField; import org.elasticsearch.painless.node.LNewArray; import org.elasticsearch.painless.node.LNewObj; +import org.elasticsearch.painless.node.LStatic; import org.elasticsearch.painless.node.LString; import org.elasticsearch.painless.node.LVariable; import org.elasticsearch.painless.node.SBlock; import org.elasticsearch.painless.node.SBreak; +import org.elasticsearch.painless.node.SCatch; import org.elasticsearch.painless.node.SContinue; import org.elasticsearch.painless.node.SDeclBlock; import org.elasticsearch.painless.node.SDeclaration; import org.elasticsearch.painless.node.SDo; import org.elasticsearch.painless.node.SExpression; import org.elasticsearch.painless.node.SFor; +import org.elasticsearch.painless.node.SIf; import org.elasticsearch.painless.node.SIfElse; import org.elasticsearch.painless.node.SReturn; import org.elasticsearch.painless.node.SSource; import org.elasticsearch.painless.node.SThrow; -import org.elasticsearch.painless.node.STrap; import org.elasticsearch.painless.node.STry; import org.elasticsearch.painless.node.SWhile; @@ -119,343 +130,308 @@ import java.util.List; /** * Converts the ANTLR tree to a Painless tree. 
*/ -public final class Walker extends PainlessParserBaseVisitor<ANode> { +public final class Walker extends PainlessParserBaseVisitor<Object> { - public static SSource buildPainlessTree(final String source, final Reserved reserved) { - return new Walker(source, reserved).source; + public static SSource buildPainlessTree(String source, Reserved reserved, CompilerSettings settings) { + return new Walker(source, reserved, settings).source; } private final Reserved reserved; private final SSource source; + private final CompilerSettings settings; - private Walker(final String source, final Reserved reserved) { + private Walker(String source, Reserved reserved, CompilerSettings settings) { this.reserved = reserved; + this.settings = settings; this.source = (SSource)visit(buildAntlrTree(source)); } - private SourceContext buildAntlrTree(final String source) { - final ANTLRInputStream stream = new ANTLRInputStream(source); - final PainlessLexer lexer = new ErrorHandlingLexer(stream); - final PainlessParser parser = new PainlessParser(new CommonTokenStream(lexer)); - final ParserErrorStrategy strategy = new ParserErrorStrategy(); + private SourceContext buildAntlrTree(String source) { + ANTLRInputStream stream = new ANTLRInputStream(source); + PainlessLexer lexer = new ErrorHandlingLexer(stream); + PainlessParser parser = new PainlessParser(new CommonTokenStream(lexer)); + ParserErrorStrategy strategy = new ParserErrorStrategy(); lexer.removeErrorListeners(); parser.removeErrorListeners(); + + if (settings.isPicky()) { + setupPicky(parser); + } + parser.setErrorHandler(strategy); return parser.source(); } - private int line(final ParserRuleContext ctx) { + private void setupPicky(PainlessParser parser) { + // The diagnostic listener invokes syntaxError on the other listeners for ambiguity issues, + parser.addErrorListener(new DiagnosticErrorListener(true)); + // and a second listener fails the test when that happens. + parser.addErrorListener(new BaseErrorListener() { + @Override + public void syntaxError(final Recognizer<?,?> recognizer, final Object offendingSymbol, final int line, + final int charPositionInLine, final String msg, final RecognitionException e) { + throw new AssertionError("line: " + line + ", offset: " + charPositionInLine + + ", symbol: " + offendingSymbol + " " + msg); + } + }); + + // Enable exact ambiguity detection (costly). We enable exact since it's the default for + // DiagnosticErrorListener; life is too short to think about what 'inexact ambiguity' might mean.
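// ---------------------------------------------------------------------------
// Editor's sketch (illustrative, not part of the patch): the same strict
// wiring as setupPicky above, shown standalone for an arbitrary ANTLR 4
// grammar. StrictLexer/StrictParser are hypothetical generated names; the
// listener and prediction-mode calls are plain ANTLR 4 runtime API, matching
// the imports added in the Walker hunk above.
//
//     CommonTokenStream tokens =
//         new CommonTokenStream(new StrictLexer(new ANTLRInputStream(source)));
//     StrictParser parser = new StrictParser(tokens);
//     parser.removeErrorListeners();
//     // reports *exact* ambiguities by calling syntaxError on the other listeners
//     parser.addErrorListener(new DiagnosticErrorListener(true));
//     parser.addErrorListener(new BaseErrorListener() {
//         @Override
//         public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol,
//                                 int line, int charPositionInLine, String msg,
//                                 RecognitionException e) {
//             throw new AssertionError(
//                 "line: " + line + ", offset: " + charPositionInLine + ", " + msg);
//         }
//     });
//     // exact ambiguity resolution is precise but costly, hence picky-mode only
//     parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);
// ---------------------------------------------------------------------------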
+ parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION); + } + + private int line(ParserRuleContext ctx) { return ctx.getStart().getLine(); } - private String location(final ParserRuleContext ctx) { + private int offset(ParserRuleContext ctx) { + return ctx.getStart().getStartIndex(); + } + + private String location(ParserRuleContext ctx) { return "[ " + ctx.getStart().getLine() + " : " + ctx.getStart().getCharPositionInLine() + " ]"; } @Override - public ANode visitSource(final SourceContext ctx) { - final List statements = new ArrayList<>(); + public Object visitSource(SourceContext ctx) { + List statements = new ArrayList<>(); - for (final StatementContext statement : ctx.statement()) { + for (StatementContext statement : ctx.statement()) { statements.add((AStatement)visit(statement)); } - return new SSource(line(ctx), location(ctx), statements); + return new SSource(line(ctx), offset(ctx), location(ctx), statements); } @Override - public ANode visitIf(final IfContext ctx) { - final AExpression condition = (AExpression)visit(ctx.expression()); - final AStatement ifblock = (AStatement)visit(ctx.block(0)); - final AStatement elseblock = ctx.block(1) == null ? null : (AStatement)visit(ctx.block(1)); + public Object visitIf(IfContext ctx) { + AExpression expression = (AExpression)visitExpression(ctx.expression()); + SBlock ifblock = (SBlock)visit(ctx.trailer(0)); - return new SIfElse(line(ctx), location(ctx), condition, ifblock, elseblock); + if (ctx.trailer().size() > 1) { + SBlock elseblock = (SBlock)visit(ctx.trailer(1)); + + return new SIfElse(line(ctx), offset(ctx), location(ctx), expression, ifblock, elseblock); + } else { + return new SIf(line(ctx), offset(ctx), location(ctx), expression, ifblock); + } } @Override - public ANode visitWhile(final WhileContext ctx) { - final AExpression condition = (AExpression)visit(ctx.expression()); - final AStatement block = ctx.block() == null ? null : (AStatement)visit(ctx.block()); + public Object visitWhile(WhileContext ctx) { + if (settings.getMaxLoopCounter() > 0) { + reserved.usesLoop(); + } - reserved.usesLoop(); + AExpression expression = (AExpression)visitExpression(ctx.expression()); - return new SWhile(line(ctx), location(ctx), condition, block); + if (ctx.trailer() != null) { + SBlock block = (SBlock)visit(ctx.trailer()); + + return new SWhile(line(ctx), offset(ctx), location(ctx), settings.getMaxLoopCounter(), expression, block); + } else if (ctx.empty() != null) { + return new SWhile(line(ctx), offset(ctx), location(ctx), settings.getMaxLoopCounter(), expression, null); + } else { + throw new IllegalStateException("Error " + location(ctx) + " Illegal tree structure."); + } } @Override - public ANode visitDo(final DoContext ctx) { - final AStatement block = ctx.block() == null ? null : (AStatement)visit(ctx.block()); - final AExpression condition = (AExpression)visit(ctx.expression()); + public Object visitDo(DoContext ctx) { + if (settings.getMaxLoopCounter() > 0) { + reserved.usesLoop(); + } - reserved.usesLoop(); + AExpression expression = (AExpression)visitExpression(ctx.expression()); + SBlock block = (SBlock)visit(ctx.block()); - return new SDo(line(ctx), location(ctx), block, condition); + return new SDo(line(ctx), offset(ctx), location(ctx), settings.getMaxLoopCounter(), block, expression); } @Override - public ANode visitFor(final ForContext ctx) { - final ANode intializer = ctx.initializer() == null ? null : visit(ctx.initializer()); - final AExpression condition = ctx.expression() == null ? 
null : (AExpression)visit(ctx.expression()); - final AExpression afterthought = ctx.afterthought() == null ? null : (AExpression)visit(ctx.afterthought()); - final AStatement block = ctx.block() == null ? null : (AStatement)visit(ctx.block()); + public Object visitFor(ForContext ctx) { + if (settings.getMaxLoopCounter() > 0) { + reserved.usesLoop(); + } - reserved.usesLoop(); + ANode initializer = ctx.initializer() == null ? null : (ANode)visit(ctx.initializer()); + AExpression expression = ctx.expression() == null ? null : (AExpression)visitExpression(ctx.expression()); + AExpression afterthought = ctx.afterthought() == null ? null : (AExpression)visit(ctx.afterthought()); - return new SFor(line(ctx), location(ctx), intializer, condition, afterthought, block); + if (ctx.trailer() != null) { + SBlock block = (SBlock)visit(ctx.trailer()); + + return new SFor(line(ctx), offset(ctx), location(ctx), + settings.getMaxLoopCounter(), initializer, expression, afterthought, block); + } else if (ctx.empty() != null) { + return new SFor(line(ctx), offset(ctx), location(ctx), + settings.getMaxLoopCounter(), initializer, expression, afterthought, null); + } else { + throw new IllegalStateException("Error " + location(ctx) + " Illegal tree structure."); + } } @Override - public ANode visitDecl(final DeclContext ctx) { + public Object visitDecl(DeclContext ctx) { return visit(ctx.declaration()); } @Override - public ANode visitContinue(final ContinueContext ctx) { - return new SContinue(line(ctx), location(ctx)); + public Object visitContinue(ContinueContext ctx) { + return new SContinue(line(ctx), offset(ctx), location(ctx)); } @Override - public ANode visitBreak(final BreakContext ctx) { - return new SBreak(line(ctx), location(ctx)); + public Object visitBreak(BreakContext ctx) { + return new SBreak(line(ctx), offset(ctx), location(ctx)); } @Override - public ANode visitReturn(final ReturnContext ctx) { - final AExpression expression = (AExpression)visit(ctx.expression()); + public Object visitReturn(ReturnContext ctx) { + AExpression expression = (AExpression)visitExpression(ctx.expression()); - return new SReturn(line(ctx), location(ctx), expression); + return new SReturn(line(ctx), offset(ctx), location(ctx), expression); } @Override - public ANode visitTry(final TryContext ctx) { - final AStatement block = (AStatement)visit(ctx.block()); - final List traps = new ArrayList<>(); + public Object visitTry(TryContext ctx) { + SBlock block = (SBlock)visit(ctx.block()); + List catches = new ArrayList<>(); - for (final TrapContext trap : ctx.trap()) { - traps.add((STrap)visit(trap)); + for (TrapContext trap : ctx.trap()) { + catches.add((SCatch)visit(trap)); } - return new STry(line(ctx), location(ctx), block, traps); + return new STry(line(ctx), offset(ctx), location(ctx), block, catches); } @Override - public ANode visitThrow(final ThrowContext ctx) { - final AExpression expression = (AExpression)visit(ctx.expression()); + public Object visitThrow(ThrowContext ctx) { + AExpression expression = (AExpression)visitExpression(ctx.expression()); - return new SThrow(line(ctx), location(ctx), expression); + return new SThrow(line(ctx), offset(ctx), location(ctx), expression); } @Override - public ANode visitExpr(final ExprContext ctx) { - final AExpression expression = (AExpression)visit(ctx.expression()); + public Object visitExpr(ExprContext ctx) { + AExpression expression = (AExpression)visitExpression(ctx.expression()); - return new SExpression(line(ctx), location(ctx), expression); + return new 
SExpression(line(ctx), offset(ctx), location(ctx), expression); } @Override - public ANode visitMultiple(final MultipleContext ctx) { - final List statements = new ArrayList<>(); + public Object visitTrailer(TrailerContext ctx) { + if (ctx.block() != null) { + return visit(ctx.block()); + } else if (ctx.statement() != null) { + List statements = new ArrayList<>(); + statements.add((AStatement)visit(ctx.statement())); - for (final StatementContext statement : ctx.statement()) { - statements.add((AStatement)visit(statement)); + return new SBlock(line(ctx), offset(ctx), location(ctx), statements); + } else { + throw new IllegalStateException("Error " + location(ctx) + " Illegal tree structure."); } - - return new SBlock(line(ctx), location(ctx), statements); } @Override - public ANode visitSingle(final SingleContext ctx) { - final List statements = new ArrayList<>(); - statements.add((AStatement)visit(ctx.statement())); + public Object visitBlock(BlockContext ctx) { + if (ctx.statement().isEmpty()) { + return null; + } else { + List statements = new ArrayList<>(); - return new SBlock(line(ctx), location(ctx), statements); + for (StatementContext statement : ctx.statement()) { + statements.add((AStatement)visit(statement)); + } + + return new SBlock(line(ctx), offset(ctx), location(ctx), statements); + } } @Override - public ANode visitEmpty(final EmptyContext ctx) { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); + public Object visitEmpty(EmptyContext ctx) { + throw new IllegalStateException("Error " + location(ctx) + " Illegal tree structure."); } @Override - public ANode visitEmptyscope(final EmptyscopeContext ctx) { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); - } - - @Override - public ANode visitInitializer(final InitializerContext ctx) { + public Object visitInitializer(InitializerContext ctx) { if (ctx.declaration() != null) { return visit(ctx.declaration()); } else if (ctx.expression() != null) { - return visit(ctx.expression()); - } - - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); - } - - @Override - public ANode visitAfterthought(final AfterthoughtContext ctx) { - return visit(ctx.expression()); - } - - @Override - public ANode visitDeclaration(final DeclarationContext ctx) { - final String type = ctx.decltype().getText(); - final List declarations = new ArrayList<>(); - - for (final DeclvarContext declvar : ctx.declvar()) { - final String name = declvar.identifier().getText(); - final AExpression expression = declvar.expression() == null ? null : (AExpression)visit(declvar.expression()); - declarations.add(new SDeclaration(line(ctx), location(ctx), type, name, expression)); - } - - return new SDeclBlock(line(ctx), location(ctx), declarations); - } - - @Override - public ANode visitDecltype(final DecltypeContext ctx) { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); - } - - @Override - public ANode visitDeclvar(final DeclvarContext ctx) { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); - } - - @Override - public ANode visitTrap(final TrapContext ctx) { - final String type = ctx.identifier(0).getText(); - final String name = ctx.identifier(1).getText(); - final AStatement block = ctx.block() == null ? 
null : (AStatement)visit(ctx.block()); - - return new STrap(line(ctx), location(ctx), type, name, block); - } - - @Override - public ANode visitIdentifier(final IdentifierContext ctx) { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); - } - - @Override - public ANode visitGeneric(final GenericContext ctx) { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); - } - - @Override - public ANode visitPrecedence(final PrecedenceContext ctx) { - return visit(ctx.expression()); - } - - @Override - public ANode visitNumeric(final NumericContext ctx) { - final boolean negate = ctx.parent instanceof UnaryContext && ((UnaryContext)ctx.parent).SUB() != null; - - if (ctx.DECIMAL() != null) { - return new EDecimal(line(ctx), location(ctx), (negate ? "-" : "") + ctx.DECIMAL().getText()); - } else if (ctx.HEX() != null) { - return new ENumeric(line(ctx), location(ctx), (negate ? "-" : "") + ctx.HEX().getText().substring(2), 16); - } else if (ctx.INTEGER() != null) { - return new ENumeric(line(ctx), location(ctx), (negate ? "-" : "") + ctx.INTEGER().getText(), 10); - } else if (ctx.OCTAL() != null) { - return new ENumeric(line(ctx), location(ctx), (negate ? "-" : "") + ctx.OCTAL().getText().substring(1), 8); + return visitExpression(ctx.expression()); } else { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); + throw new IllegalStateException("Error " + location(ctx) + " Illegal tree structure."); } } @Override - public ANode visitTrue(final TrueContext ctx) { - return new EBoolean(line(ctx), location(ctx), true); + public Object visitAfterthought(AfterthoughtContext ctx) { + return visitExpression(ctx.expression()); } @Override - public ANode visitFalse(FalseContext ctx) { - return new EBoolean(line(ctx), location(ctx), false); + public Object visitDeclaration(DeclarationContext ctx) { + String type = ctx.decltype().getText(); + List declarations = new ArrayList<>(); + + for (DeclvarContext declvar : ctx.declvar()) { + String name = declvar.ID().getText(); + AExpression expression = declvar.expression() == null ? 
null : (AExpression)visitExpression(declvar.expression()); + + declarations.add(new SDeclaration(line(ctx), offset(ctx), location(ctx), type, name, expression)); + } + + return new SDeclBlock(line(ctx), offset(ctx), location(ctx), declarations); } @Override - public ANode visitNull(final NullContext ctx) { - return new ENull(line(ctx), location(ctx)); + public Object visitDecltype(DecltypeContext ctx) { + throw new IllegalStateException("Error " + location(ctx) + " Illegal tree structure."); } @Override - public ANode visitPostinc(final PostincContext ctx) { - final List links = new ArrayList<>(); - final Operation operation; + public Object visitDeclvar(DeclvarContext ctx) { + throw new IllegalStateException("Error " + location(ctx) + " Illegal tree structure."); + } - visitChain(ctx.chain(), links); + @Override + public Object visitTrap(TrapContext ctx) { + String type = ctx.TYPE().getText(); + String name = ctx.ID().getText(); + SBlock block = (SBlock)visit(ctx.block()); - if (ctx.INCR() != null) { - operation = Operation.INCR; - } else if (ctx.DECR() != null) { - operation = Operation.DECR; + return new SCatch(line(ctx), offset(ctx), location(ctx), type, name, block); + } + + @Override + public Object visitDelimiter(DelimiterContext ctx) { + throw new IllegalStateException("Error " + location(ctx) + " Illegal tree structure."); + } + + private Object visitExpression(ExpressionContext ctx) { + Object expression = visit(ctx); + + if (expression instanceof List) { + @SuppressWarnings("unchecked") + List links = (List)expression; + + return new EChain(line(ctx), offset(ctx), location(ctx), links, false, false, null, null); } else { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); - } - - return new EChain(line(ctx), location(ctx), links, false, true, operation, null); - } - - @Override - public ANode visitPreinc(final PreincContext ctx) { - final List links = new ArrayList<>(); - final Operation operation; - - visitChain(ctx.chain(), links); - - if (ctx.INCR() != null) { - operation = Operation.INCR; - } else if (ctx.DECR() != null) { - operation = Operation.DECR; - } else { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); - } - - return new EChain(line(ctx), location(ctx), links, true, false, operation, null); - } - - @Override - public ANode visitRead(final ReadContext ctx) { - final List links = new ArrayList<>(); - - visitChain(ctx.chain(), links); - - return new EChain(line(ctx), location(ctx), links, false, false, null, null); - } - - @Override - public ANode visitUnary(final UnaryContext ctx) { - if (ctx.SUB() != null && ctx.expression() instanceof NumericContext) { - return visit(ctx.expression()); - } else { - final Operation operation; - - if (ctx.BOOLNOT() != null) { - operation = Operation.NOT; - } else if (ctx.BWNOT() != null) { - operation = Operation.BWNOT; - } else if (ctx.ADD() != null) { - operation = Operation.ADD; - } else if (ctx.SUB() != null) { - operation = Operation.SUB; - } else { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); - } - - return new EUnary(line(ctx), location(ctx), operation, (AExpression)visit(ctx.expression())); + return expression; } } @Override - public ANode visitCast(final CastContext ctx) { - return new EExplicit(line(ctx), location(ctx), ctx.decltype().getText(), (AExpression)visit(ctx.expression())); + public Object visitSingle(SingleContext ctx) { + return visit(ctx.unary()); } @Override - public ANode visitBinary(final BinaryContext 
ctx) { - final AExpression left = (AExpression)visit(ctx.expression(0)); - final AExpression right = (AExpression)visit(ctx.expression(1)); + public Object visitBinary(BinaryContext ctx) { + AExpression left = (AExpression)visitExpression(ctx.expression(0)); + AExpression right = (AExpression)visitExpression(ctx.expression(1)); final Operation operation; if (ctx.MUL() != null) { @@ -484,13 +460,13 @@ public final class Walker extends PainlessParserBaseVisitor { throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); } - return new EBinary(line(ctx), location(ctx), operation, left, right); + return new EBinary(line(ctx), offset(ctx), location(ctx), operation, left, right); } @Override - public ANode visitComp(PainlessParser.CompContext ctx) { - final AExpression left = (AExpression)visit(ctx.expression(0)); - final AExpression right = (AExpression)visit(ctx.expression(1)); + public Object visitComp(CompContext ctx) { + AExpression left = (AExpression)visitExpression(ctx.expression(0)); + AExpression right = (AExpression)visitExpression(ctx.expression(1)); final Operation operation; if (ctx.LT() != null) { @@ -513,13 +489,13 @@ public final class Walker extends PainlessParserBaseVisitor { throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); } - return new EComp(line(ctx), location(ctx), operation, left, right); + return new EComp(line(ctx), offset(ctx), location(ctx), operation, left, right); } @Override - public ANode visitBool(PainlessParser.BoolContext ctx) { - final AExpression left = (AExpression)visit(ctx.expression(0)); - final AExpression right = (AExpression)visit(ctx.expression(1)); + public Object visitBool(BoolContext ctx) { + AExpression left = (AExpression)visitExpression(ctx.expression(0)); + AExpression right = (AExpression)visitExpression(ctx.expression(1)); final Operation operation; if (ctx.BOOLAND() != null) { @@ -530,27 +506,27 @@ public final class Walker extends PainlessParserBaseVisitor { throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); } - return new EBool(line(ctx), location(ctx), operation, left, right); - } - - - @Override - public ANode visitConditional(final ConditionalContext ctx) { - final AExpression condition = (AExpression)visit(ctx.expression(0)); - final AExpression left = (AExpression)visit(ctx.expression(1)); - final AExpression right = (AExpression)visit(ctx.expression(2)); - - return new EConditional(line(ctx), location(ctx), condition, left, right); + return new EBool(line(ctx), offset(ctx), location(ctx), operation, left, right); } @Override - public ANode visitAssignment(final AssignmentContext ctx) { - final List links = new ArrayList<>(); + public Object visitConditional(ConditionalContext ctx) { + AExpression condition = (AExpression)visitExpression(ctx.expression(0)); + AExpression left = (AExpression)visitExpression(ctx.expression(1)); + AExpression right = (AExpression)visitExpression(ctx.expression(2)); + + return new EConditional(line(ctx), offset(ctx), location(ctx), condition, left, right); + } + + @Override + public Object visitAssignment(AssignmentContext ctx) { + @SuppressWarnings("unchecked") + List links = (List)visit(ctx.chain()); final Operation operation; - visitChain(ctx.chain(), links); - - if (ctx.AMUL() != null) { + if (ctx.ASSIGN() != null) { + operation = null; + } else if (ctx.AMUL() != null) { operation = Operation.MUL; } else if (ctx.ADIV() != null) { operation = Operation.DIV; @@ -573,222 +549,290 @@ public final class Walker 
extends PainlessParserBaseVisitor { } else if (ctx.AOR() != null) { operation = Operation.BWOR; } else { - operation = null; + throw new IllegalStateException("Error " + location(ctx) + ": Illegal tree structure."); } - return new EChain(line(ctx), location(ctx), links, false, false, operation, (AExpression)visit(ctx.expression())); + AExpression expression = (AExpression)visitExpression(ctx.expression()); + + return new EChain(line(ctx), offset(ctx), location(ctx), links, false, false, operation, expression); } - private void visitChain(final ChainContext ctx, final List links) { - if (ctx.linkprec() != null) { - visitLinkprec(ctx.linkprec(), links); - } else if (ctx.linkcast() != null) { - visitLinkcast(ctx.linkcast(), links); - } else if (ctx.linkvar() != null) { - visitLinkvar(ctx.linkvar(), links); - } else if (ctx.linknew() != null) { - visitLinknew(ctx.linknew(), links); - } else if (ctx.linkstring() != null) { - visitLinkstring(ctx.linkstring(), links); + private Object visitUnary(UnaryContext ctx) { + Object expression = visit(ctx); + + if (expression instanceof List) { + @SuppressWarnings("unchecked") + List links = (List)expression; + + return new EChain(line(ctx), offset(ctx), location(ctx), links, false, false, null, null); } else { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); + return expression; } } @Override - public ANode visitChain(final ChainContext ctx) { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); - } + public Object visitPre(PreContext ctx) { + @SuppressWarnings("unchecked") + List links = (List)visit(ctx.chain()); + final Operation operation; - private void visitLinkprec(final LinkprecContext ctx, final List links) { - if (ctx.linkprec() != null) { - visitLinkprec(ctx.linkprec(), links); - } else if (ctx.linkcast() != null) { - visitLinkcast(ctx.linkcast(), links); - } else if (ctx.linkvar() != null) { - visitLinkvar(ctx.linkvar(), links); - } else if (ctx.linknew() != null) { - visitLinknew(ctx.linknew(), links); - } else if (ctx.linkstring() != null) { - visitLinkstring(ctx.linkstring(), links); + if (ctx.INCR() != null) { + operation = Operation.INCR; + } else if (ctx.DECR() != null) { + operation = Operation.DECR; } else { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); + throw new IllegalStateException("Error " + location(ctx) + ": Illegal tree structure."); } - if (ctx.linkbrace() != null) { - visitLinkbrace(ctx.linkbrace(), links); - } else if (ctx.linkdot() != null) { - visitLinkdot(ctx.linkdot(), links); - } + return new EChain(line(ctx), offset(ctx), location(ctx), links, true, false, operation, null); } @Override - public ANode visitLinkprec(final LinkprecContext ctx) { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); - } + public Object visitPost(PostContext ctx) { + @SuppressWarnings("unchecked") + List links = (List)visit(ctx.chain()); + final Operation operation; - private void visitLinkcast(final LinkcastContext ctx, final List links) { - if (ctx.linkprec() != null) { - visitLinkprec(ctx.linkprec(), links); - } else if (ctx.linkcast() != null) { - visitLinkcast(ctx.linkcast(), links); - } else if (ctx.linkvar() != null) { - visitLinkvar(ctx.linkvar(), links); - } else if (ctx.linknew() != null) { - visitLinknew(ctx.linknew(), links); - } else if (ctx.linkstring() != null) { - visitLinkstring(ctx.linkstring(), links); + if (ctx.INCR() != null) { + operation = Operation.INCR; + } else if (ctx.DECR() != 
null) { + operation = Operation.DECR; } else { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); + throw new IllegalStateException("Error " + location(ctx) + ": Illegal tree structure."); } - links.add(new LCast(line(ctx), location(ctx), ctx.decltype().getText())); + return new EChain(line(ctx), offset(ctx), location(ctx), links, false, true, operation, null); } @Override - public ANode visitLinkcast(final LinkcastContext ctx) { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); + public Object visitRead(ReadContext ctx) { + return visit(ctx.chain()); } - private void visitLinkbrace(final LinkbraceContext ctx, final List links) { - links.add(new LBrace(line(ctx), location(ctx), (AExpression)visit(ctx.expression()))); + @Override + public Object visitNumeric(NumericContext ctx) { + final boolean negate = ctx.parent instanceof OperatorContext && ((OperatorContext)ctx.parent).SUB() != null; - if (ctx.linkbrace() != null) { - visitLinkbrace(ctx.linkbrace(), links); - } else if (ctx.linkdot() != null) { - visitLinkdot(ctx.linkdot(), links); + if (ctx.DECIMAL() != null) { + return new EDecimal(line(ctx), offset(ctx), location(ctx), (negate ? "-" : "") + ctx.DECIMAL().getText()); + } else if (ctx.HEX() != null) { + return new ENumeric(line(ctx), offset(ctx), location(ctx), (negate ? "-" : "") + ctx.HEX().getText().substring(2), 16); + } else if (ctx.INTEGER() != null) { + return new ENumeric(line(ctx), offset(ctx), location(ctx), (negate ? "-" : "") + ctx.INTEGER().getText(), 10); + } else if (ctx.OCTAL() != null) { + return new ENumeric(line(ctx), offset(ctx), location(ctx), (negate ? "-" : "") + ctx.OCTAL().getText().substring(1), 8); + } else { + throw new IllegalStateException("Error " + location(ctx) + ": Illegal tree structure."); } } @Override - public ANode visitLinkbrace(final LinkbraceContext ctx) { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); + public Object visitTrue(TrueContext ctx) { + return new EBoolean(line(ctx), offset(ctx), location(ctx), true); } - private void visitLinkdot(final LinkdotContext ctx, final List links) { - if (ctx.linkcall() != null) { - visitLinkcall(ctx.linkcall(), links); - } else if (ctx.linkfield() != null) { - visitLinkfield(ctx.linkfield(), links); + @Override + public Object visitFalse(FalseContext ctx) { + return new EBoolean(line(ctx), offset(ctx), location(ctx), false); + } + + @Override + public Object visitNull(NullContext ctx) { + return new ENull(line(ctx), offset(ctx), location(ctx)); + } + + @Override + public Object visitOperator(OperatorContext ctx) { + if (ctx.SUB() != null && ctx.unary() instanceof NumericContext) { + return visit(ctx.unary()); + } else { + AExpression expression = (AExpression)visitUnary(ctx.unary()); + final Operation operation; + + if (ctx.BOOLNOT() != null) { + operation = Operation.NOT; + } else if (ctx.BWNOT() != null) { + operation = Operation.BWNOT; + } else if (ctx.ADD() != null) { + operation = Operation.ADD; + } else if (ctx.SUB() != null) { + operation = Operation.SUB; + } else { + throw new IllegalStateException("Error " + location(ctx) + " Illegal tree structure."); + } + + return new EUnary(line(ctx), offset(ctx), location(ctx), operation, expression); } } @Override - public ANode visitLinkdot(final LinkdotContext ctx) { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); - } + public Object visitCast(CastContext ctx) { + String type = ctx.decltype().getText(); + Object 
child = visit(ctx.unary()); - private void visitLinkcall(final LinkcallContext ctx, final List links) { - final List arguments = new ArrayList<>(); + if (child instanceof List) { + @SuppressWarnings("unchecked") + List links = (List)child; + links.add(new LCast(line(ctx), offset(ctx), location(ctx), type)); - for (final ExpressionContext expression : ctx.arguments().expression()) { - arguments.add((AExpression)visit(expression)); - } - - links.add(new LCall(line(ctx), location(ctx), ctx.EXTID().getText(), arguments)); - - if (ctx.linkbrace() != null) { - visitLinkbrace(ctx.linkbrace(), links); - } else if (ctx.linkdot() != null) { - visitLinkdot(ctx.linkdot(), links); + return links; + } else { + return new EExplicit(line(ctx), offset(ctx), location(ctx), type, (AExpression)child); } } @Override - public ANode visitLinkcall(final LinkcallContext ctx) { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); + public Object visitDynamic(DynamicContext ctx) { + Object child = visit(ctx.primary()); + + if (child instanceof List) { + @SuppressWarnings("unchecked") + List links = (List)child; + + for (SecondaryContext secondary : ctx.secondary()) { + links.add((ALink)visit(secondary)); + } + + return links; + } else if (!ctx.secondary().isEmpty()) { + throw new IllegalStateException("Error " + location(ctx) + " Illegal tree structure."); + } else { + return child; + } } - private void visitLinkvar(final LinkvarContext ctx, final List links) { - final String name = ctx.identifier().getText(); + @Override + public Object visitStatic(StaticContext ctx) { + String type = ctx.decltype().getText(); + List links = new ArrayList<>(); + + links.add(new LStatic(line(ctx), offset(ctx), location(ctx), type)); + links.add((ALink)visit(ctx.dot())); + + for (SecondaryContext secondary : ctx.secondary()) { + links.add((ALink)visit(secondary)); + } + + return links; + } + + @Override + public Object visitNewarray(NewarrayContext ctx) { + String type = ctx.TYPE().getText(); + List expressions = new ArrayList<>(); + + for (ExpressionContext expression : ctx.expression()) { + expressions.add((AExpression)visitExpression(expression)); + } + + List links = new ArrayList<>(); + links.add(new LNewArray(line(ctx), offset(ctx), location(ctx), type, expressions)); + + if (ctx.dot() != null) { + links.add((ALink)visit(ctx.dot())); + + for (SecondaryContext secondary : ctx.secondary()) { + links.add((ALink)visit(secondary)); + } + } else if (!ctx.secondary().isEmpty()) { + throw new IllegalStateException("Error " + location(ctx) + " Illegal tree structure."); + } + + return links; + } + + @Override + public Object visitExprprec(ExprprecContext ctx) { + return visit(ctx.expression()); + } + + @Override + public Object visitChainprec(ChainprecContext ctx) { + return visit(ctx.unary()); + } + + @Override + public Object visitString(StringContext ctx) { + String string = ctx.STRING().getText().substring(1, ctx.STRING().getText().length() - 1); + List links = new ArrayList<>(); + links.add(new LString(line(ctx), offset(ctx), location(ctx), string)); + + return links; + } + + @Override + public Object visitVariable(VariableContext ctx) { + String name = ctx.ID().getText(); + List links = new ArrayList<>(); + links.add(new LVariable(line(ctx), offset(ctx), location(ctx), name)); reserved.markReserved(name); - links.add(new LVariable(line(ctx), location(ctx), name)); + return links; + } - if (ctx.linkbrace() != null) { - visitLinkbrace(ctx.linkbrace(), links); - } else if (ctx.linkdot() != null) { - 
visitLinkdot(ctx.linkdot(), links); + @Override + public Object visitNewobject(NewobjectContext ctx) { + String type = ctx.TYPE().getText(); + List arguments = new ArrayList<>(); + + for (ExpressionContext expression : ctx.arguments().expression()) { + arguments.add((AExpression)visitExpression(expression)); + } + + List links = new ArrayList<>(); + links.add(new LNewObj(line(ctx), offset(ctx), location(ctx), type, arguments)); + + return links; + } + + @Override + public Object visitSecondary(SecondaryContext ctx) { + if (ctx.dot() != null) { + return visit(ctx.dot()); + } else if (ctx.brace() != null) { + return visit(ctx.brace()); + } else { + throw new IllegalStateException("Error " + location(ctx) + " Illegal tree structure."); } } @Override - public ANode visitLinkvar(final LinkvarContext ctx) { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); + public Object visitCallinvoke(CallinvokeContext ctx) { + String name = ctx.DOTID().getText(); + List arguments = new ArrayList<>(); + + for (ExpressionContext expression : ctx.arguments().expression()) { + arguments.add((AExpression)visitExpression(expression)); + } + + return new LCall(line(ctx), offset(ctx), location(ctx), name, arguments); } - private void visitLinkfield(final LinkfieldContext ctx, final List links) { + @Override + public Object visitFieldaccess(FieldaccessContext ctx) { final String value; - if (ctx.EXTID() != null) { - value = ctx.EXTID().getText(); - } else if (ctx.EXTINTEGER() != null) { - value = ctx.EXTINTEGER().getText(); + if (ctx.DOTID() != null) { + value = ctx.DOTID().getText(); + } else if (ctx.DOTINTEGER() != null) { + value = ctx.DOTINTEGER().getText(); } else { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); + throw new IllegalStateException("Error " + location(ctx) + " Illegal tree structure."); } - links.add(new LField(line(ctx), location(ctx), value)); - - if (ctx.linkbrace() != null) { - visitLinkbrace(ctx.linkbrace(), links); - } else if (ctx.linkdot() != null) { - visitLinkdot(ctx.linkdot(), links); - } + return new LField(line(ctx), offset(ctx), location(ctx), value); } @Override - public ANode visitLinkfield(final LinkfieldContext ctx) { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); - } + public Object visitBraceaccess(BraceaccessContext ctx) { + AExpression expression = (AExpression)visitExpression(ctx.expression()); - private void visitLinknew(final LinknewContext ctx, final List links) { - final List arguments = new ArrayList<>(); - - if (ctx.arguments() != null) { - for (final ExpressionContext expression : ctx.arguments().expression()) { - arguments.add((AExpression)visit(expression)); - } - - links.add(new LNewObj(line(ctx), location(ctx), ctx.identifier().getText(), arguments)); - } else if (ctx.expression().size() > 0) { - for (final ExpressionContext expression : ctx.expression()) { - arguments.add((AExpression)visit(expression)); - } - - links.add(new LNewArray(line(ctx), location(ctx), ctx.identifier().getText(), arguments)); - } else { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); - } - - if (ctx.linkdot() != null) { - visitLinkdot(ctx.linkdot(), links); - } + return new LBrace(line(ctx), offset(ctx), location(ctx), expression); } @Override - public ANode visitLinknew(final LinknewContext ctx) { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); - } - - private void visitLinkstring(final LinkstringContext 
ctx, final List<ALink> links) { - links.add(new LString(line(ctx), location(ctx), ctx.STRING().getText().substring(1, ctx.STRING().getText().length() - 1))); - - if (ctx.linkbrace() != null) { - visitLinkbrace(ctx.linkbrace(), links); - } else if (ctx.linkdot() != null) { - visitLinkdot(ctx.linkdot(), links); - } - } - - @Override - public ANode visitLinkstring(final LinkstringContext ctx) { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); - } - - @Override - public ANode visitArguments(final ArgumentsContext ctx) { - throw new IllegalStateException("Error " + location(ctx) + ": Unexpected state."); + public Object visitArguments(ArgumentsContext ctx) { + throw new IllegalStateException("Error " + location(ctx) + " Illegal tree structure."); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java index 528da4384c8..c46181817e2 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Cast; import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.AnalyzerCaster; @@ -67,6 +65,12 @@ public abstract class AExpression extends ANode { */ protected boolean explicit = false; + /** + * Set to true if a cast to a boxed or unboxed type is allowed. This is used + * for method arguments because casting may be required. + */ + protected boolean internal = false; + /** * Set to the value of the constant this expression node represents if * and only if the node represents a constant. If this is not null @@ -94,34 +98,34 @@ */ protected Label fals = null; - public AExpression(final int line, final String location) { - super(line, location); + public AExpression(int line, int offset, String location) { + super(line, offset, location); } /** * Checks for errors and collects data for the writing phase. */ - abstract void analyze(final CompilerSettings settings, final Definition definition, final Variables variables); + abstract void analyze(Variables variables); /** * Writes ASM based on the data collected during the analysis phase. */ - abstract void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter); + abstract void write(MethodWriter writer); /** * Inserts {@link ECast} nodes into the tree for implicit casts. Also replaces * nodes with the constant variable set to a non-null value with {@link EConstant}. * @return The new child node for the parent node calling this method.
*/ - AExpression cast(final CompilerSettings settings, final Definition definition, final Variables variables) { - final Cast cast = AnalyzerCaster.getLegalCast(definition, location, actual, expected, explicit); + AExpression cast(Variables variables) { + final Cast cast = AnalyzerCaster.getLegalCast(location, actual, expected, explicit, internal); if (cast == null) { if (constant == null || this instanceof EConstant) { return this; } else { - final EConstant econstant = new EConstant(line, location, constant); - econstant.analyze(settings, definition, variables); + final EConstant econstant = new EConstant(line, offset, location, constant); + econstant.analyze(variables); if (!expected.equals(econstant.actual)) { throw new IllegalStateException(error("Illegal tree structure.")); @@ -131,7 +135,7 @@ public abstract class AExpression extends ANode { } } else { if (constant == null) { - final ECast ecast = new ECast(line, location, this, cast); + final ECast ecast = new ECast(line, offset, location, this, cast); ecast.statement = statement; ecast.actual = expected; ecast.isNull = isNull; @@ -141,8 +145,8 @@ public abstract class AExpression extends ANode { if (expected.sort.constant) { constant = AnalyzerCaster.constCast(location, constant, cast); - final EConstant econstant = new EConstant(line, location, constant); - econstant.analyze(settings, definition, variables); + final EConstant econstant = new EConstant(line, offset, location, constant); + econstant.analyze(variables); if (!expected.equals(econstant.actual)) { throw new IllegalStateException(error("Illegal tree structure.")); @@ -150,19 +154,19 @@ public abstract class AExpression extends ANode { return econstant; } else if (this instanceof EConstant) { - final ECast ecast = new ECast(line, location, this, cast); + final ECast ecast = new ECast(line, offset, location, this, cast); ecast.actual = expected; return ecast; } else { - final EConstant econstant = new EConstant(line, location, constant); - econstant.analyze(settings, definition, variables); + final EConstant econstant = new EConstant(line, offset, location, constant); + econstant.analyze(variables); if (!actual.equals(econstant.actual)) { throw new IllegalStateException(error("Illegal tree structure.")); } - final ECast ecast = new ECast(line, location, econstant, cast); + final ECast ecast = new ECast(line, offset, location, econstant, cast); ecast.actual = expected; return ecast; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ALink.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ALink.java index ffbfff112b3..d68f662f913 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ALink.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ALink.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -75,8 +73,8 @@ public abstract class ALink extends ANode { */ String string = null; - ALink(final int line, final String location, final int size) { - super(line, location); + ALink(int line, int offset, String location, int size) { + super(line, offset, location); this.size = size; } @@ -87,27 +85,27 @@ public abstract class ALink extends ANode { * def or a shortcut is used. Otherwise, returns itself. 
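// ---------------------------------------------------------------------------
// Editor's note (illustrative, not part of the patch): how ALink instances are
// produced under the new Walker above. Visiting a chain now returns link lists
// instead of threading a shared list through the old visitLink* methods;
// roughly, for a plain read of `x.y[0]`:
//
//     visitVariable("x")      ->  links = [ LVariable("x") ]
//     visitFieldaccess(".y")  ->  LField("y")        // appended by visitDynamic
//     visitBraceaccess("[0]") ->  LBrace(<0-expr>)   // appended by visitDynamic
//
// and visitExpression wraps the finished list for a simple read:
//
//     new EChain(line, offset, location, links,
//                /*pre*/ false, /*post*/ false, /*operation*/ null, /*expression*/ null)
//
// Argument lists are abbreviated here; see the Walker diff above for the exact calls.
// ---------------------------------------------------------------------------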
This will be * updated into the {@link EChain} node's list of links. */ - abstract ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables); + abstract ALink analyze(Variables variables); /** * Write values before a load/store occurs such as an array index. */ - abstract void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter); + abstract void write(MethodWriter writer); /** * Write a load for the specific link type. */ - abstract void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter); + abstract void load(MethodWriter writer); /** * Write a store for the specific link type. */ - abstract void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter); + abstract void store(MethodWriter writer); /** * Used to copy link data from one to another during analysis in the case of replacement. */ - final ALink copy(final ALink link) { + final ALink copy(ALink link) { load = link.load; store = link.store; statik = link.statik; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ANode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ANode.java index a61b002dfca..414f6afe038 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ANode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ANode.java @@ -28,32 +28,38 @@ import org.objectweb.asm.Label; public abstract class ANode { /** - * The line number in the original source used for debug messages. + * The line number in the original source used for debugging and errors. */ final int line; + /** + * The character offset in the original source used for debugging and errors. + */ + final int offset; + /** * The location in the original source to be printed in error messages. */ final String location; - ANode(final int line, final String location) { + ANode(int line, int offset, String location) { this.line = line; + this.offset = offset; this.location = location; } public String error(final String message) { return "Error " + location + ": " + message; } - - /** + + /** * Writes line number information *

* Currently we emit line number data for leaf S-nodes */ - void writeDebugInfo(MethodWriter adapter) { + void writeDebugInfo(MethodWriter writer) { Label label = new Label(); - adapter.visitLabel(label); - adapter.visitLineNumber(line, label); + writer.visitLabel(label); + writer.visitLineNumber(line, label); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java index 569e8cfb03b..80d24b4cab3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.objectweb.asm.Label; import org.elasticsearch.painless.MethodWriter; @@ -109,17 +107,17 @@ public abstract class AStatement extends ANode { */ Label brake = null; - AStatement(final int line, final String location) { - super(line, location); + AStatement(int line, int offset, String location) { + super(line, offset, location); } /** * Checks for errors and collects data for the writing phase. */ - abstract void analyze(final CompilerSettings settings, final Definition definition, final Variables variables); + abstract void analyze(Variables variables); /** * Writes ASM based on the data collected during the analysis phase. */ - abstract void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter); + abstract void write(MethodWriter writer); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java index 07fe1ff3447..0c083111d66 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Sort; import org.elasticsearch.painless.Definition.Type; @@ -39,8 +38,8 @@ public final class EBinary extends AExpression { boolean cat = false; - public EBinary(final int line, final String location, final Operation operation, final AExpression left, final AExpression right) { - super(line, location); + public EBinary(int line, int offset, String location, Operation operation, AExpression left, AExpression right) { + super(line, offset, location); this.operation = operation; this.left = left; @@ -48,39 +47,39 @@ public final class EBinary extends AExpression { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { if (operation == Operation.MUL) { - analyzeMul(settings, definition, variables); + analyzeMul(variables); } else if (operation == Operation.DIV) { - analyzeDiv(settings, definition, variables); + analyzeDiv(variables); } else if (operation == Operation.REM) { - analyzeRem(settings, definition, variables); + analyzeRem(variables); } else if (operation == Operation.ADD) { - analyzeAdd(settings, definition, variables); + analyzeAdd(variables); } else if
(operation == Operation.SUB) { - analyzeSub(settings, definition, variables); + analyzeSub(variables); } else if (operation == Operation.LSH) { - analyzeLSH(settings, definition, variables); + analyzeLSH(variables); } else if (operation == Operation.RSH) { - analyzeRSH(settings, definition, variables); + analyzeRSH(variables); } else if (operation == Operation.USH) { - analyzeUSH(settings, definition, variables); + analyzeUSH(variables); } else if (operation == Operation.BWAND) { - analyzeBWAnd(settings, definition, variables); + analyzeBWAnd(variables); } else if (operation == Operation.XOR) { - analyzeXor(settings, definition, variables); + analyzeXor(variables); } else if (operation == Operation.BWOR) { - analyzeBWOr(settings, definition, variables); + analyzeBWOr(variables); } else { throw new IllegalStateException(error("Illegal tree structure.")); } } - private void analyzeMul(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeMul(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true); + Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true); if (promote == null) { throw new ClassCastException(error("Cannot apply multiply [*] to types " + @@ -90,25 +89,20 @@ public final class EBinary extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { - final boolean overflow = settings.getNumericOverflow(); - final Sort sort = promote.sort; + Sort sort = promote.sort; if (sort == Sort.INT) { - constant = overflow ? (int)left.constant * (int)right.constant : - Math.multiplyExact((int)left.constant, (int)right.constant); + constant = (int)left.constant * (int)right.constant; } else if (sort == Sort.LONG) { - constant = overflow ? (long)left.constant * (long)right.constant : - Math.multiplyExact((long)left.constant, (long)right.constant); + constant = (long)left.constant * (long)right.constant; } else if (sort == Sort.FLOAT) { - constant = overflow ? (float)left.constant * (float)right.constant : - org.elasticsearch.painless.Utility.multiplyWithoutOverflow((float)left.constant, (float)right.constant); + constant = (float)left.constant * (float)right.constant; } else if (sort == Sort.DOUBLE) { - constant = overflow ? 
(double)left.constant * (double)right.constant : - org.elasticsearch.painless.Utility.multiplyWithoutOverflow((double)left.constant, (double)right.constant); + constant = (double)left.constant * (double)right.constant; } else { throw new IllegalStateException(error("Illegal tree structure.")); } @@ -117,11 +111,11 @@ public final class EBinary extends AExpression { actual = promote; } - private void analyzeDiv(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeDiv(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true); + Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true); if (promote == null) { throw new ClassCastException(error("Cannot apply divide [/] to types " + @@ -131,25 +125,20 @@ public final class EBinary extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { - final boolean overflow = settings.getNumericOverflow(); - final Sort sort = promote.sort; + Sort sort = promote.sort; if (sort == Sort.INT) { - constant = overflow ? (int)left.constant / (int)right.constant : - org.elasticsearch.painless.Utility.divideWithoutOverflow((int)left.constant, (int)right.constant); + constant = (int)left.constant / (int)right.constant; } else if (sort == Sort.LONG) { - constant = overflow ? (long)left.constant / (long)right.constant : - org.elasticsearch.painless.Utility.divideWithoutOverflow((long)left.constant, (long)right.constant); + constant = (long)left.constant / (long)right.constant; } else if (sort == Sort.FLOAT) { - constant = overflow ? (float)left.constant / (float)right.constant : - org.elasticsearch.painless.Utility.divideWithoutOverflow((float)left.constant, (float)right.constant); + constant = (float)left.constant / (float)right.constant; } else if (sort == Sort.DOUBLE) { - constant = overflow ? 
(double)left.constant / (double)right.constant : - org.elasticsearch.painless.Utility.divideWithoutOverflow((double)left.constant, (double)right.constant); + constant = (double)left.constant / (double)right.constant; } else { throw new IllegalStateException(error("Illegal tree structure.")); } @@ -158,11 +147,11 @@ public final class EBinary extends AExpression { actual = promote; } - private void analyzeRem(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeRem(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true); + Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true); if (promote == null) { throw new ClassCastException(error("Cannot apply remainder [%] to types " + @@ -172,23 +161,20 @@ public final class EBinary extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { - final boolean overflow = settings.getNumericOverflow(); - final Sort sort = promote.sort; + Sort sort = promote.sort; if (sort == Sort.INT) { constant = (int)left.constant % (int)right.constant; } else if (sort == Sort.LONG) { constant = (long)left.constant % (long)right.constant; } else if (sort == Sort.FLOAT) { - constant = overflow ? (float)left.constant % (float)right.constant : - org.elasticsearch.painless.Utility.remainderWithoutOverflow((float)left.constant, (float)right.constant); + constant = (float)left.constant % (float)right.constant; } else if (sort == Sort.DOUBLE) { - constant = overflow ? (double)left.constant % (double)right.constant : - org.elasticsearch.painless.Utility.remainderWithoutOverflow((double)left.constant, (double)right.constant); + constant = (double)left.constant % (double)right.constant; } else { throw new IllegalStateException(error("Illegal tree structure.")); } @@ -197,18 +183,18 @@ public final class EBinary extends AExpression { actual = promote; } - private void analyzeAdd(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeAdd(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteAdd(definition, left.actual, right.actual); + Type promote = AnalyzerCaster.promoteAdd(left.actual, right.actual); if (promote == null) { throw new ClassCastException(error("Cannot apply add [+] to types " + "[" + left.actual.name + "] and [" + right.actual.name + "].")); } - final Sort sort = promote.sort; + Sort sort = promote.sort; if (sort == Sort.STRING) { left.expected = left.actual; @@ -227,24 +213,18 @@ public final class EBinary extends AExpression { right.expected = promote; } - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { - final boolean overflow = settings.getNumericOverflow(); - if (sort == Sort.INT) { - constant = overflow ? 
(int)left.constant + (int)right.constant : - Math.addExact((int)left.constant, (int)right.constant); + constant = (int)left.constant + (int)right.constant; } else if (sort == Sort.LONG) { - constant = overflow ? (long)left.constant + (long)right.constant : - Math.addExact((long)left.constant, (long)right.constant); + constant = (long)left.constant + (long)right.constant; } else if (sort == Sort.FLOAT) { - constant = overflow ? (float)left.constant + (float)right.constant : - org.elasticsearch.painless.Utility.addWithoutOverflow((float)left.constant, (float)right.constant); + constant = (float)left.constant + (float)right.constant; } else if (sort == Sort.DOUBLE) { - constant = overflow ? (double)left.constant + (double)right.constant : - org.elasticsearch.painless.Utility.addWithoutOverflow((double)left.constant, (double)right.constant); + constant = (double)left.constant + (double)right.constant; } else if (sort == Sort.STRING) { constant = "" + left.constant + right.constant; } else { @@ -255,11 +235,11 @@ public final class EBinary extends AExpression { actual = promote; } - private void analyzeSub(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeSub(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true); + Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true); if (promote == null) { throw new ClassCastException(error("Cannot apply subtract [-] to types " + @@ -269,25 +249,20 @@ public final class EBinary extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { - final boolean overflow = settings.getNumericOverflow(); - final Sort sort = promote.sort; + Sort sort = promote.sort; if (sort == Sort.INT) { - constant = overflow ? (int)left.constant - (int)right.constant : - Math.subtractExact((int)left.constant, (int)right.constant); + constant = (int)left.constant - (int)right.constant; } else if (sort == Sort.LONG) { - constant = overflow ? (long)left.constant - (long)right.constant : - Math.subtractExact((long)left.constant, (long)right.constant); + constant = (long)left.constant - (long)right.constant; } else if (sort == Sort.FLOAT) { - constant = overflow ? (float)left.constant - (float)right.constant : - org.elasticsearch.painless.Utility.subtractWithoutOverflow((float)left.constant, (float)right.constant); + constant = (float)left.constant - (float)right.constant; } else if (sort == Sort.DOUBLE) { - constant = overflow ? 
(double)left.constant - (double)right.constant : - org.elasticsearch.painless.Utility.subtractWithoutOverflow((double)left.constant, (double)right.constant); + constant = (double)left.constant - (double)right.constant; } else { throw new IllegalStateException(error("Illegal tree structure.")); } @@ -296,11 +271,11 @@ public final class EBinary extends AExpression { actual = promote; } - private void analyzeLSH(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeLSH(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, false, true); + Type promote = AnalyzerCaster.promoteNumeric(left.actual, false); if (promote == null) { throw new ClassCastException(error("Cannot apply left shift [<<] to types " + @@ -308,14 +283,14 @@ public final class EBinary extends AExpression { } left.expected = promote; - right.expected = definition.intType; + right.expected = Definition.INT_TYPE; right.explicit = true; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { - final Sort sort = promote.sort; + Sort sort = promote.sort; if (sort == Sort.INT) { constant = (int)left.constant << (int)right.constant; @@ -329,11 +304,11 @@ public final class EBinary extends AExpression { actual = promote; } - private void analyzeRSH(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeRSH(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, false, true); + Type promote = AnalyzerCaster.promoteNumeric(left.actual, false); if (promote == null) { throw new ClassCastException(error("Cannot apply right shift [>>] to types " + @@ -341,14 +316,14 @@ public final class EBinary extends AExpression { } left.expected = promote; - right.expected = definition.intType; + right.expected = Definition.INT_TYPE; right.explicit = true; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { - final Sort sort = promote.sort; + Sort sort = promote.sort; if (sort == Sort.INT) { constant = (int)left.constant >> (int)right.constant; @@ -362,11 +337,11 @@ public final class EBinary extends AExpression { actual = promote; } - private void analyzeUSH(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeUSH(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, false, true); + Type promote = AnalyzerCaster.promoteNumeric(left.actual, false); if (promote == null) { throw new ClassCastException(error("Cannot apply unsigned shift [>>>] to types " + @@ -374,14 +349,14 @@ public final class EBinary extends AExpression { } left.expected = promote; - 
right.expected = definition.intType; + right.expected = Definition.INT_TYPE; right.explicit = true; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { - final Sort sort = promote.sort; + Sort sort = promote.sort; if (sort == Sort.INT) { constant = (int)left.constant >>> (int)right.constant; @@ -395,11 +370,11 @@ public final class EBinary extends AExpression { actual = promote; } - private void analyzeBWAnd(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeBWAnd(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, false, true); + Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, false); if (promote == null) { throw new ClassCastException(error("Cannot apply and [&] to types " + @@ -409,11 +384,11 @@ public final class EBinary extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { - final Sort sort = promote.sort; + Sort sort = promote.sort; if (sort == Sort.INT) { constant = (int)left.constant & (int)right.constant; @@ -427,11 +402,11 @@ public final class EBinary extends AExpression { actual = promote; } - private void analyzeXor(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeXor(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteXor(definition, left.actual, right.actual); + Type promote = AnalyzerCaster.promoteXor(left.actual, right.actual); if (promote == null) { throw new ClassCastException(error("Cannot apply xor [^] to types " + @@ -441,11 +416,11 @@ public final class EBinary extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { - final Sort sort = promote.sort; + Sort sort = promote.sort; if (sort == Sort.BOOL) { constant = (boolean)left.constant ^ (boolean)right.constant; @@ -461,11 +436,11 @@ public final class EBinary extends AExpression { actual = promote; } - private void analyzeBWOr(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeBWOr(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, false, true); + Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, false); if (promote == null) { throw new ClassCastException(error("Cannot apply or [|] to types " + @@ -475,11 +450,11 @@ public final class EBinary 
extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { - final Sort sort = promote.sort; + Sort sort = promote.sort; if (sort == Sort.INT) { constant = (int)left.constant | (int)right.constant; @@ -494,34 +469,34 @@ public final class EBinary extends AExpression { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter writer) { if (actual.sort == Sort.STRING && operation == Operation.ADD) { if (!cat) { - adapter.writeNewStrings(); + writer.writeNewStrings(); } - left.write(settings, definition, adapter); + left.write(writer); if (!(left instanceof EBinary) || ((EBinary)left).operation != Operation.ADD || left.actual.sort != Sort.STRING) { - adapter.writeAppendStrings(left.actual); + writer.writeAppendStrings(left.actual); } - right.write(settings, definition, adapter); + right.write(writer); if (!(right instanceof EBinary) || ((EBinary)right).operation != Operation.ADD || right.actual.sort != Sort.STRING) { - adapter.writeAppendStrings(right.actual); + writer.writeAppendStrings(right.actual); } if (!cat) { - adapter.writeToStrings(); + writer.writeToStrings(); } } else { - left.write(settings, definition, adapter); - right.write(settings, definition, adapter); + left.write(writer); + right.write(writer); - adapter.writeBinaryInstruction(settings, definition, location, actual, operation); + writer.writeBinaryInstruction(location, actual, operation); } - adapter.writeBranch(tru, fals); + writer.writeBranch(tru, fals); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java index 7f9f7dee000..eda3e364795 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Operation; import org.elasticsearch.painless.Variables; @@ -35,8 +34,8 @@ public final class EBool extends AExpression { AExpression left; AExpression right; - public EBool(final int line, final String location, final Operation operation, final AExpression left, final AExpression right) { - super(line, location); + public EBool(int line, int offset, String location, Operation operation, AExpression left, AExpression right) { + super(line, offset, location); this.operation = operation; this.left = left; @@ -44,14 +43,14 @@ public final class EBool extends AExpression { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.expected = definition.booleanType; - left.analyze(settings, definition, variables); - left = left.cast(settings, definition, variables); + void analyze(Variables variables) { + left.expected = Definition.BOOLEAN_TYPE; + left.analyze(variables); + left = left.cast(variables); - right.expected = definition.booleanType; - right.analyze(settings, definition, variables); - right = right.cast(settings, definition, variables); + right.expected = Definition.BOOLEAN_TYPE; + right.analyze(variables); + right 
= right.cast(variables); if (left.constant != null && right.constant != null) { if (operation == Operation.AND) { @@ -63,74 +62,74 @@ public final class EBool extends AExpression { } } - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter writer) { if (tru != null || fals != null) { if (operation == Operation.AND) { - final Label localfals = fals == null ? new Label() : fals; + Label localfals = fals == null ? new Label() : fals; left.fals = localfals; right.tru = tru; right.fals = fals; - left.write(settings, definition, adapter); - right.write(settings, definition, adapter); + left.write(writer); + right.write(writer); if (fals == null) { - adapter.mark(localfals); + writer.mark(localfals); } } else if (operation == Operation.OR) { - final Label localtru = tru == null ? new Label() : tru; + Label localtru = tru == null ? new Label() : tru; left.tru = localtru; right.tru = tru; right.fals = fals; - left.write(settings, definition, adapter); - right.write(settings, definition, adapter); + left.write(writer); + right.write(writer); if (tru == null) { - adapter.mark(localtru); + writer.mark(localtru); } } else { throw new IllegalStateException(error("Illegal tree structure.")); } } else { if (operation == Operation.AND) { - final Label localfals = new Label(); - final Label end = new Label(); + Label localfals = new Label(); + Label end = new Label(); left.fals = localfals; right.fals = localfals; - left.write(settings, definition, adapter); - right.write(settings, definition, adapter); + left.write(writer); + right.write(writer); - adapter.push(true); - adapter.goTo(end); - adapter.mark(localfals); - adapter.push(false); - adapter.mark(end); + writer.push(true); + writer.goTo(end); + writer.mark(localfals); + writer.push(false); + writer.mark(end); } else if (operation == Operation.OR) { - final Label localtru = new Label(); - final Label localfals = new Label(); - final Label end = new Label(); + Label localtru = new Label(); + Label localfals = new Label(); + Label end = new Label(); left.tru = localtru; right.fals = localfals; - left.write(settings, definition, adapter); - right.write(settings, definition, adapter); + left.write(writer); + right.write(writer); - adapter.mark(localtru); - adapter.push(true); - adapter.goTo(end); - adapter.mark(localfals); - adapter.push(false); - adapter.mark(end); + writer.mark(localtru); + writer.push(true); + writer.goTo(end); + writer.mark(localfals); + writer.push(false); + writer.mark(end); } else { throw new IllegalStateException(error("Illegal tree structure.")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java index 9b9f0917546..a1585e13dbc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -29,19 +28,19 @@ import org.elasticsearch.painless.MethodWriter; */ public final class EBoolean extends AExpression { - public EBoolean(final int line, final String location, final boolean 
constant) { - super(line, location); + public EBoolean(int line, int offset, String location, boolean constant) { + super(line, offset, location); this.constant = constant; } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - actual = definition.booleanType; + void analyze(Variables variables) { + actual = Definition.BOOLEAN_TYPE; } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter adapter) { throw new IllegalArgumentException(error("Illegal tree structure.")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java index d68c95c910a..e2fe5cb6d10 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Cast; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -36,8 +34,8 @@ final class ECast extends AExpression { Cast cast = null; - ECast(final int line, final String location, final AExpression child, final Cast cast) { - super(line, location); + ECast(int line, int offset, String location, AExpression child, Cast cast) { + super(line, offset, location); this.type = null; this.child = child; @@ -46,14 +44,14 @@ final class ECast extends AExpression { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { throw new IllegalStateException(error("Illegal tree structure.")); } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - child.write(settings, definition, adapter); - adapter.writeCast(cast); - adapter.writeBranch(tru, fals); + void write(MethodWriter writer) { + child.write(writer); + writer.writeCast(cast); + writer.writeBranch(tru, fals); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EChain.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EChain.java index 39afcd935ad..2de1a5b320e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EChain.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EChain.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Cast; import org.elasticsearch.painless.Definition.Sort; @@ -44,13 +43,12 @@ public final class EChain extends AExpression { boolean cat = false; Type promote = null; - boolean exact = false; Cast there = null; Cast back = null; - public EChain(final int line, final String location, final List links, - final boolean pre, final boolean post, final Operation operation, final AExpression expression) { - super(line, location); + public EChain(int line, int offset, String location, List links, + boolean pre, boolean post, Operation operation, AExpression expression) { + super(line, offset, location); this.links = links; this.pre = pre; @@ -60,25 +58,25 @@ public final class 
EChain extends AExpression { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - analyzeLinks(settings, definition, variables); + void analyze(Variables variables) { + analyzeLinks(variables); analyzeIncrDecr(); if (operation != null) { - analyzeCompound(settings, definition, variables); + analyzeCompound(variables); } else if (expression != null) { - analyzeWrite(settings, definition, variables); + analyzeWrite(variables); } else { analyzeRead(); } } - private void analyzeLinks(final CompilerSettings settings, final Definition definition, final Variables variables) { + private void analyzeLinks(Variables variables) { ALink previous = null; int index = 0; while (index < links.size()) { - final ALink current = links.get(index); + ALink current = links.get(index); if (previous != null) { current.before = previous.after; @@ -93,7 +91,7 @@ public final class EChain extends AExpression { current.store = expression != null || pre || post; } - final ALink analyzed = current.analyze(settings, definition, variables); + ALink analyzed = current.analyze(variables); if (analyzed == null) { links.remove(index); @@ -113,7 +111,7 @@ public final class EChain extends AExpression { } private void analyzeIncrDecr() { - final ALink last = links.get(links.size() - 1); + ALink last = links.get(links.size() - 1); if (pre && post) { throw new IllegalStateException(error("Illegal tree structure.")); @@ -122,29 +120,29 @@ public final class EChain extends AExpression { throw new IllegalStateException(error("Illegal tree structure.")); } - final Sort sort = last.after.sort; + Sort sort = last.after.sort; if (operation == Operation.INCR) { if (sort == Sort.DOUBLE) { - expression = new EConstant(line, location, 1D); + expression = new EConstant(line, offset, location, 1D); } else if (sort == Sort.FLOAT) { - expression = new EConstant(line, location, 1F); + expression = new EConstant(line, offset, location, 1F); } else if (sort == Sort.LONG) { - expression = new EConstant(line, location, 1L); + expression = new EConstant(line, offset, location, 1L); } else { - expression = new EConstant(line, location, 1); + expression = new EConstant(line, offset, location, 1); } operation = Operation.ADD; } else if (operation == Operation.DECR) { if (sort == Sort.DOUBLE) { - expression = new EConstant(line, location, 1D); + expression = new EConstant(line, offset, location, 1D); } else if (sort == Sort.FLOAT) { - expression = new EConstant(line, location, 1F); + expression = new EConstant(line, offset, location, 1F); } else if (sort == Sort.LONG) { - expression = new EConstant(line, location, 1L); + expression = new EConstant(line, offset, location, 1L); } else { - expression = new EConstant(line, location, 1); + expression = new EConstant(line, offset, location, 1); } operation = Operation.SUB; @@ -154,33 +152,33 @@ public final class EChain extends AExpression { } } - private void analyzeCompound(final CompilerSettings settings, final Definition definition, final Variables variables) { - final ALink last = links.get(links.size() - 1); + private void analyzeCompound(Variables variables) { + ALink last = links.get(links.size() - 1); - expression.analyze(settings, definition, variables); + expression.analyze(variables); if (operation == Operation.MUL) { - promote = AnalyzerCaster.promoteNumeric(definition, last.after, expression.actual, true, true); + promote = AnalyzerCaster.promoteNumeric(last.after, expression.actual, true); } else if (operation == Operation.DIV) 
{ - promote = AnalyzerCaster.promoteNumeric(definition, last.after, expression.actual, true, true); + promote = AnalyzerCaster.promoteNumeric(last.after, expression.actual, true); } else if (operation == Operation.REM) { - promote = AnalyzerCaster.promoteNumeric(definition, last.after, expression.actual, true, true); + promote = AnalyzerCaster.promoteNumeric(last.after, expression.actual, true); } else if (operation == Operation.ADD) { - promote = AnalyzerCaster.promoteAdd(definition, last.after, expression.actual); + promote = AnalyzerCaster.promoteAdd(last.after, expression.actual); } else if (operation == Operation.SUB) { - promote = AnalyzerCaster.promoteNumeric(definition, last.after, expression.actual, true, true); + promote = AnalyzerCaster.promoteNumeric(last.after, expression.actual, true); } else if (operation == Operation.LSH) { - promote = AnalyzerCaster.promoteNumeric(definition, last.after, false, true); + promote = AnalyzerCaster.promoteNumeric(last.after, false); } else if (operation == Operation.RSH) { - promote = AnalyzerCaster.promoteNumeric(definition, last.after, false, true); + promote = AnalyzerCaster.promoteNumeric(last.after, false); } else if (operation == Operation.USH) { - promote = AnalyzerCaster.promoteNumeric(definition, last.after, false, true); + promote = AnalyzerCaster.promoteNumeric(last.after, false); } else if (operation == Operation.BWAND) { - promote = AnalyzerCaster.promoteXor(definition, last.after, expression.actual); + promote = AnalyzerCaster.promoteXor(last.after, expression.actual); } else if (operation == Operation.XOR) { - promote = AnalyzerCaster.promoteXor(definition, last.after, expression.actual); + promote = AnalyzerCaster.promoteXor(last.after, expression.actual); } else if (operation == Operation.BWOR) { - promote = AnalyzerCaster.promoteXor(definition, last.after, expression.actual); + promote = AnalyzerCaster.promoteXor(last.after, expression.actual); } else { throw new IllegalStateException(error("Illegal tree structure.")); } @@ -200,48 +198,45 @@ public final class EChain extends AExpression { expression.expected = expression.actual; } else if (operation == Operation.LSH || operation == Operation.RSH || operation == Operation.USH) { - expression.expected = definition.intType; + expression.expected = Definition.INT_TYPE; expression.explicit = true; } else { expression.expected = promote; } - expression = expression.cast(settings, definition, variables); + expression = expression.cast(variables); - exact = !settings.getNumericOverflow() && - (operation == Operation.MUL || operation == Operation.DIV || operation == Operation.REM || - operation == Operation.ADD || operation == Operation.SUB); - there = AnalyzerCaster.getLegalCast(definition, location, last.after, promote, false); - back = AnalyzerCaster.getLegalCast(definition, location, promote, last.after, true); + there = AnalyzerCaster.getLegalCast(location, last.after, promote, false, false); + back = AnalyzerCaster.getLegalCast(location, promote, last.after, true, false); this.statement = true; - this.actual = read ? last.after : definition.voidType; + this.actual = read ? 
last.after : Definition.VOID_TYPE; } - private void analyzeWrite(final CompilerSettings settings, final Definition definition, final Variables variables) { - final ALink last = links.get(links.size() - 1); + private void analyzeWrite(Variables variables) { + ALink last = links.get(links.size() - 1); - // If the store node is a DEF node, we remove the cast to DEF from the expression + // If the store node is a def node, we remove the cast to def from the expression // and promote the real type to it: if (last instanceof IDefLink) { - expression.analyze(settings, definition, variables); + expression.analyze(variables); last.after = expression.expected = expression.actual; } else { // otherwise we adapt the type of the expression to the store type expression.expected = last.after; - expression.analyze(settings, definition, variables); + expression.analyze(variables); } - expression = expression.cast(settings, definition, variables); + expression = expression.cast(variables); this.statement = true; - this.actual = read ? last.after : definition.voidType; + this.actual = read ? last.after : Definition.VOID_TYPE; } private void analyzeRead() { - final ALink last = links.get(links.size() - 1); + ALink last = links.get(links.size() - 1); - // If the load node is a DEF node, we adapt its after type to use _this_ expected output type: + // If the load node is a def node, we adapt its after type to use _this_ expected output type: if (last instanceof IDefLink && this.expected != null) { last.after = this.expected; } @@ -252,72 +247,70 @@ public final class EChain extends AExpression { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter writer) { if (cat) { - adapter.writeNewStrings(); + writer.writeNewStrings(); } - final ALink last = links.get(links.size() - 1); + ALink last = links.get(links.size() - 1); - for (final ALink link : links) { - link.write(settings, definition, adapter); + for (ALink link : links) { + link.write(writer); if (link == last && link.store) { if (cat) { - adapter.writeDup(link.size, 1); - link.load(settings, definition, adapter); - adapter.writeAppendStrings(link.after); + writer.writeDup(link.size, 1); + link.load(writer); + writer.writeAppendStrings(link.after); - expression.write(settings, definition, adapter); + expression.write(writer); if (!(expression instanceof EBinary) || ((EBinary)expression).operation != Operation.ADD || expression.actual.sort != Sort.STRING) { - adapter.writeAppendStrings(expression.actual); + writer.writeAppendStrings(expression.actual); } - adapter.writeToStrings(); - adapter.writeCast(back); + writer.writeToStrings(); + writer.writeCast(back); if (link.load) { - adapter.writeDup(link.after.sort.size, link.size); + writer.writeDup(link.after.sort.size, link.size); } - link.store(settings, definition, adapter); + link.store(writer); } else if (operation != null) { - adapter.writeDup(link.size, 0); - link.load(settings, definition, adapter); + writer.writeDup(link.size, 0); + link.load(writer); if (link.load && post) { - adapter.writeDup(link.after.sort.size, link.size); + writer.writeDup(link.after.sort.size, link.size); } - adapter.writeCast(there); - expression.write(settings, definition, adapter); - adapter.writeBinaryInstruction(settings, definition, location, promote, operation); + writer.writeCast(there); + expression.write(writer); + writer.writeBinaryInstruction(location, promote, operation); - if (!exact || 
!adapter.writeExactInstruction(definition, promote.sort, link.after.sort)) { - adapter.writeCast(back); - } + writer.writeCast(back); if (link.load && !post) { - adapter.writeDup(link.after.sort.size, link.size); + writer.writeDup(link.after.sort.size, link.size); } - link.store(settings, definition, adapter); + link.store(writer); } else { - expression.write(settings, definition, adapter); + expression.write(writer); if (link.load) { - adapter.writeDup(link.after.sort.size, link.size); + writer.writeDup(link.after.sort.size, link.size); } - link.store(settings, definition, adapter); + link.store(writer); } } else { - link.load(settings, definition, adapter); + link.load(writer); } } - adapter.writeBranch(tru, fals); + writer.writeBranch(tru, fals); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java index d9337ae562b..4521c76d911 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Sort; import org.elasticsearch.painless.Definition.Type; @@ -35,6 +34,8 @@ import static org.elasticsearch.painless.WriterConstants.DEF_GTE_CALL; import static org.elasticsearch.painless.WriterConstants.DEF_GT_CALL; import static org.elasticsearch.painless.WriterConstants.DEF_LTE_CALL; import static org.elasticsearch.painless.WriterConstants.DEF_LT_CALL; +import static org.elasticsearch.painless.WriterConstants.DEF_UTIL_TYPE; +import static org.elasticsearch.painless.WriterConstants.UTILITY_TYPE; /** * Represents a comparison expression. 
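* <p> * Editor's note (illustration, not part of this change): each analyze method below promotes both operands to a common type and folds constant operands at analysis time, so {@code 2 > 1L} promotes the {@code int} operand to {@code long} and collapses to the constant {@code true} before any bytecode is written.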
@@ -45,8 +46,8 @@ public final class EComp extends AExpression { AExpression left; AExpression right; - public EComp(final int line, final String location, final Operation operation, final AExpression left, final AExpression right) { - super(line, location); + public EComp(int line, int offset, String location, Operation operation, AExpression left, AExpression right) { + super(line, offset, location); this.operation = operation; this.left = left; @@ -54,33 +55,33 @@ public final class EComp extends AExpression { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { if (operation == Operation.EQ) { - analyzeEq(settings, definition, variables); + analyzeEq(variables); } else if (operation == Operation.EQR) { - analyzeEqR(settings, definition, variables); + analyzeEqR(variables); } else if (operation == Operation.NE) { - analyzeNE(settings, definition, variables); + analyzeNE(variables); } else if (operation == Operation.NER) { - analyzeNER(settings, definition, variables); + analyzeNER(variables); } else if (operation == Operation.GTE) { - analyzeGTE(settings, definition, variables); + analyzeGTE(variables); } else if (operation == Operation.GT) { - analyzeGT(settings, definition, variables); + analyzeGT(variables); } else if (operation == Operation.LTE) { - analyzeLTE(settings, definition, variables); + analyzeLTE(variables); } else if (operation == Operation.LT) { - analyzeLT(settings, definition, variables); + analyzeLT(variables); } else { throw new IllegalStateException(error("Illegal tree structure.")); } } - private void analyzeEq(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeEq(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteEquality(definition, left.actual, right.actual); + Type promote = AnalyzerCaster.promoteEquality(left.actual, right.actual); if (promote == null) { throw new ClassCastException(error("Cannot apply equals [==] to types " + @@ -90,15 +91,15 @@ public final class EComp extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.isNull && right.isNull) { throw new IllegalArgumentException(error("Extraneous comparison of null constants.")); } if ((left.constant != null || left.isNull) && (right.constant != null || right.isNull)) { - final Sort sort = promote.sort; + Sort sort = promote.sort; if (sort == Sort.BOOL) { constant = (boolean)left.constant == (boolean)right.constant; @@ -119,14 +120,14 @@ public final class EComp extends AExpression { } } - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } - private void analyzeEqR(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeEqR(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteReference(definition, left.actual, right.actual); + Type promote = AnalyzerCaster.promoteEquality(left.actual, right.actual); if (promote == null) { throw new 
ClassCastException(error("Cannot apply reference equals [===] to types " + @@ -136,15 +137,15 @@ public final class EComp extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.isNull && right.isNull) { throw new IllegalArgumentException(error("Extraneous comparison of null constants.")); } if ((left.constant != null || left.isNull) && (right.constant != null || right.isNull)) { - final Sort sort = promote.sort; + Sort sort = promote.sort; if (sort == Sort.BOOL) { constant = (boolean)left.constant == (boolean)right.constant; @@ -161,14 +162,14 @@ public final class EComp extends AExpression { } } - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } - private void analyzeNE(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeNE(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteEquality(definition, left.actual, right.actual); + Type promote = AnalyzerCaster.promoteEquality(left.actual, right.actual); if (promote == null) { throw new ClassCastException(error("Cannot apply not equals [!=] to types " + @@ -178,15 +179,15 @@ public final class EComp extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.isNull && right.isNull) { throw new IllegalArgumentException(error("Extraneous comparison of null constants.")); } if ((left.constant != null || left.isNull) && (right.constant != null || right.isNull)) { - final Sort sort = promote.sort; + Sort sort = promote.sort; if (sort == Sort.BOOL) { constant = (boolean)left.constant != (boolean)right.constant; @@ -207,14 +208,14 @@ public final class EComp extends AExpression { } } - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } - private void analyzeNER(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeNER(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteReference(definition, left.actual, right.actual); + Type promote = AnalyzerCaster.promoteEquality(left.actual, right.actual); if (promote == null) { throw new ClassCastException(error("Cannot apply reference not equals [!==] to types " + @@ -224,15 +225,15 @@ public final class EComp extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.isNull && right.isNull) { throw new IllegalArgumentException(error("Extraneous comparison of null constants.")); } if ((left.constant != null || left.isNull) && (right.constant != null || right.isNull)) { - final Sort sort = promote.sort; + Sort sort = promote.sort; if (sort == Sort.BOOL) { constant = (boolean)left.constant != (boolean)right.constant; @@ -249,14 +250,14 @@ public 
final class EComp extends AExpression { } } - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } - private void analyzeGTE(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeGTE(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true); + Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true); if (promote == null) { throw new ClassCastException(error("Cannot apply greater than or equals [>=] to types " + @@ -266,11 +267,11 @@ public final class EComp extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { - final Sort sort = promote.sort; + Sort sort = promote.sort; if (sort == Sort.INT) { constant = (int)left.constant >= (int)right.constant; @@ -285,14 +286,14 @@ public final class EComp extends AExpression { } } - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } - private void analyzeGT(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeGT(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true); + Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true); if (promote == null) { throw new ClassCastException(error("Cannot apply greater than [>] to types " + @@ -302,11 +303,11 @@ public final class EComp extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { - final Sort sort = promote.sort; + Sort sort = promote.sort; if (sort == Sort.INT) { constant = (int)left.constant > (int)right.constant; @@ -321,14 +322,14 @@ public final class EComp extends AExpression { } } - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } - private void analyzeLTE(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeLTE(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true); + Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true); if (promote == null) { throw new ClassCastException(error("Cannot apply less than or equals [<=] to types " + @@ -338,11 +339,11 @@ public final class EComp extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && 
right.constant != null) { - final Sort sort = promote.sort; + Sort sort = promote.sort; if (sort == Sort.INT) { constant = (int)left.constant <= (int)right.constant; @@ -357,14 +358,14 @@ public final class EComp extends AExpression { } } - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } - private void analyzeLT(final CompilerSettings settings, final Definition definition, final Variables variables) { - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + private void analyzeLT(Variables variables) { + left.analyze(variables); + right.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, left.actual, right.actual, true, true); + Type promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true); if (promote == null) { throw new ClassCastException(error("Cannot apply less than [<] to types " + @@ -374,11 +375,11 @@ public final class EComp extends AExpression { left.expected = promote; right.expected = promote; - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); if (left.constant != null && right.constant != null) { - final Sort sort = promote.sort; + Sort sort = promote.sort; if (sort == Sort.INT) { constant = (int)left.constant < (int)right.constant; @@ -393,32 +394,32 @@ public final class EComp extends AExpression { } } - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - final boolean branch = tru != null || fals != null; - final org.objectweb.asm.Type rtype = right.actual.type; - final Sort rsort = right.actual.sort; + void write(MethodWriter writer) { + boolean branch = tru != null || fals != null; + org.objectweb.asm.Type rtype = right.actual.type; + Sort rsort = right.actual.sort; - left.write(settings, definition, adapter); + left.write(writer); if (!right.isNull) { - right.write(settings, definition, adapter); + right.write(writer); } - final Label jump = tru != null ? tru : fals != null ? fals : new Label(); - final Label end = new Label(); + Label jump = tru != null ? tru : fals != null ?
fals : new Label(); + Label end = new Label(); - final boolean eq = (operation == Operation.EQ || operation == Operation.EQR) && (tru != null || fals == null) || + boolean eq = (operation == Operation.EQ || operation == Operation.EQR) && (tru != null || fals == null) || (operation == Operation.NE || operation == Operation.NER) && fals != null; - final boolean ne = (operation == Operation.NE || operation == Operation.NER) && (tru != null || fals == null) || + boolean ne = (operation == Operation.NE || operation == Operation.NER) && (tru != null || fals == null) || (operation == Operation.EQ || operation == Operation.EQR) && fals != null; - final boolean lt = operation == Operation.LT && (tru != null || fals == null) || operation == Operation.GTE && fals != null; - final boolean lte = operation == Operation.LTE && (tru != null || fals == null) || operation == Operation.GT && fals != null; - final boolean gt = operation == Operation.GT && (tru != null || fals == null) || operation == Operation.LTE && fals != null; - final boolean gte = operation == Operation.GTE && (tru != null || fals == null) || operation == Operation.LT && fals != null; + boolean lt = operation == Operation.LT && (tru != null || fals == null) || operation == Operation.GTE && fals != null; + boolean lte = operation == Operation.LTE && (tru != null || fals == null) || operation == Operation.GT && fals != null; + boolean gt = operation == Operation.GT && (tru != null || fals == null) || operation == Operation.LTE && fals != null; + boolean gte = operation == Operation.GTE && (tru != null || fals == null) || operation == Operation.LT && fals != null; boolean writejump = true; @@ -429,8 +430,8 @@ public final class EComp extends AExpression { case CHAR: throw new IllegalStateException(error("Illegal tree structure.")); case BOOL: - if (eq) adapter.ifZCmp(MethodWriter.EQ, jump); - else if (ne) adapter.ifZCmp(MethodWriter.NE, jump); + if (eq) writer.ifZCmp(MethodWriter.EQ, jump); + else if (ne) writer.ifZCmp(MethodWriter.NE, jump); else { throw new IllegalStateException(error("Illegal tree structure.")); } @@ -440,12 +441,12 @@ public final class EComp extends AExpression { case LONG: case FLOAT: case DOUBLE: - if (eq) adapter.ifCmp(rtype, MethodWriter.EQ, jump); - else if (ne) adapter.ifCmp(rtype, MethodWriter.NE, jump); - else if (lt) adapter.ifCmp(rtype, MethodWriter.LT, jump); - else if (lte) adapter.ifCmp(rtype, MethodWriter.LE, jump); - else if (gt) adapter.ifCmp(rtype, MethodWriter.GT, jump); - else if (gte) adapter.ifCmp(rtype, MethodWriter.GE, jump); + if (eq) writer.ifCmp(rtype, MethodWriter.EQ, jump); + else if (ne) writer.ifCmp(rtype, MethodWriter.NE, jump); + else if (lt) writer.ifCmp(rtype, MethodWriter.LT, jump); + else if (lte) writer.ifCmp(rtype, MethodWriter.LE, jump); + else if (gt) writer.ifCmp(rtype, MethodWriter.GT, jump); + else if (gte) writer.ifCmp(rtype, MethodWriter.GE, jump); else { throw new IllegalStateException(error("Illegal tree structure.")); } @@ -454,63 +455,66 @@ public final class EComp extends AExpression { case DEF: if (eq) { if (right.isNull) { - adapter.ifNull(jump); - } else if (!left.isNull && operation == Operation.EQ) { - adapter.invokeStatic(definition.defobjType.type, DEF_EQ_CALL); + writer.ifNull(jump); + } else if (!left.isNull && (operation == Operation.EQ || operation == Operation.NE)) { + writer.invokeStatic(DEF_UTIL_TYPE, DEF_EQ_CALL); + writejump = false; } else { - adapter.ifCmp(rtype, MethodWriter.EQ, jump); + writer.ifCmp(rtype, MethodWriter.EQ, jump); } } else if (ne) { 
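Editor's note: the eq/ne/lt/lte/gt/gte booleans computed in this hunk encode a branch inversion — when the only jump target is the false branch, the emitted comparison is the negation of the source-level operator. A small sketch of that mapping; the Operation enum below is a simplified stand-in, not the real Painless class.

// Illustrative sketch: which comparison write() actually emits.
enum Operation { LT, LTE, GT, GTE }

final class BranchInversionSketch {
    /** Returns the comparison to emit, given whether the jump fires on false. */
    static Operation emitted(Operation op, boolean jumpOnFalse) {
        if (!jumpOnFalse) {
            return op; // jump taken when the comparison holds
        }
        switch (op) { // jump taken when the comparison fails: emit the negation
            case LT:  return Operation.GTE;
            case LTE: return Operation.GT;
            case GT:  return Operation.LTE;
            case GTE: return Operation.LT;
            default:  throw new AssertionError();
        }
    }

    public static void main(String[] args) {
        // `x < y` with only a false-branch label compiles to a >= jump.
        System.out.println(emitted(Operation.LT, true)); // GTE
    }
}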
if (right.isNull) { - adapter.ifNonNull(jump); - } else if (!left.isNull && operation == Operation.NE) { - adapter.invokeStatic(definition.defobjType.type, DEF_EQ_CALL); - adapter.ifZCmp(MethodWriter.EQ, jump); + writer.ifNonNull(jump); + } else if (!left.isNull && (operation == Operation.EQ || operation == Operation.NE)) { + writer.invokeStatic(DEF_UTIL_TYPE, DEF_EQ_CALL); + writer.ifZCmp(MethodWriter.EQ, jump); } else { - adapter.ifCmp(rtype, MethodWriter.NE, jump); + writer.ifCmp(rtype, MethodWriter.NE, jump); } } else if (lt) { - adapter.invokeStatic(definition.defobjType.type, DEF_LT_CALL); + writer.invokeStatic(DEF_UTIL_TYPE, DEF_LT_CALL); + writejump = false; } else if (lte) { - adapter.invokeStatic(definition.defobjType.type, DEF_LTE_CALL); + writer.invokeStatic(DEF_UTIL_TYPE, DEF_LTE_CALL); + writejump = false; } else if (gt) { - adapter.invokeStatic(definition.defobjType.type, DEF_GT_CALL); + writer.invokeStatic(DEF_UTIL_TYPE, DEF_GT_CALL); + writejump = false; } else if (gte) { - adapter.invokeStatic(definition.defobjType.type, DEF_GTE_CALL); + writer.invokeStatic(DEF_UTIL_TYPE, DEF_GTE_CALL); + writejump = false; } else { throw new IllegalStateException(error("Illegal tree structure.")); } - writejump = left.isNull || ne || operation == Operation.EQR; - if (branch && !writejump) { - adapter.ifZCmp(MethodWriter.NE, jump); + writer.ifZCmp(MethodWriter.NE, jump); } break; default: if (eq) { if (right.isNull) { - adapter.ifNull(jump); - } else if (operation == Operation.EQ) { - adapter.invokeStatic(definition.utilityType.type, CHECKEQUALS); + writer.ifNull(jump); + } else if (operation == Operation.EQ || operation == Operation.NE) { + writer.invokeStatic(UTILITY_TYPE, CHECKEQUALS); if (branch) { - adapter.ifZCmp(MethodWriter.NE, jump); + writer.ifZCmp(MethodWriter.NE, jump); } writejump = false; } else { - adapter.ifCmp(rtype, MethodWriter.EQ, jump); + writer.ifCmp(rtype, MethodWriter.EQ, jump); } } else if (ne) { if (right.isNull) { - adapter.ifNonNull(jump); - } else if (operation == Operation.NE) { - adapter.invokeStatic(definition.utilityType.type, CHECKEQUALS); - adapter.ifZCmp(MethodWriter.EQ, jump); + writer.ifNonNull(jump); + } else if (operation == Operation.EQ || operation == Operation.NE) { + writer.invokeStatic(UTILITY_TYPE, CHECKEQUALS); + writer.ifZCmp(MethodWriter.EQ, jump); } else { - adapter.ifCmp(rtype, MethodWriter.NE, jump); + writer.ifCmp(rtype, MethodWriter.NE, jump); } } else { throw new IllegalStateException(error("Illegal tree structure.")); @@ -518,11 +522,11 @@ public final class EComp extends AExpression { } if (!branch && writejump) { - adapter.push(false); - adapter.goTo(end); - adapter.mark(jump); - adapter.push(true); - adapter.mark(end); + writer.push(false); + writer.goTo(end); + writer.mark(jump); + writer.push(true); + writer.mark(end); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java index 5853fa3242e..a13176e56b7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.AnalyzerCaster; @@ -36,9 +35,8 @@ public final class EConditional 
extends AExpression { AExpression left; AExpression right; - public EConditional(final int line, final String location, - final AExpression condition, final AExpression left, final AExpression right) { - super(line, location); + public EConditional(int line, int offset, String location, AExpression condition, AExpression left, AExpression right) { + super(line, offset, location); this.condition = condition; this.left = left; @@ -46,10 +44,10 @@ public final class EConditional extends AExpression { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - condition.expected = definition.booleanType; - condition.analyze(settings, definition, variables); - condition = condition.cast(settings, definition, variables); + void analyze(Variables variables) { + condition.expected = Definition.BOOLEAN_TYPE; + condition.analyze(variables); + condition = condition.cast(variables); if (condition.constant != null) { throw new IllegalArgumentException(error("Extraneous conditional statement.")); @@ -57,39 +55,41 @@ public final class EConditional extends AExpression { left.expected = expected; left.explicit = explicit; + left.internal = internal; right.expected = expected; right.explicit = explicit; + right.internal = internal; actual = expected; - left.analyze(settings, definition, variables); - right.analyze(settings, definition, variables); + left.analyze(variables); + right.analyze(variables); if (expected == null) { - final Type promote = AnalyzerCaster.promoteConditional(definition, left.actual, right.actual, left.constant, right.constant); + final Type promote = AnalyzerCaster.promoteConditional(left.actual, right.actual, left.constant, right.constant); left.expected = promote; right.expected = promote; actual = promote; } - left = left.cast(settings, definition, variables); - right = right.cast(settings, definition, variables); + left = left.cast(variables); + right = right.cast(variables); } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - final Label localfals = new Label(); - final Label end = new Label(); + void write(MethodWriter writer) { + Label localfals = new Label(); + Label end = new Label(); condition.fals = localfals; left.tru = right.tru = tru; left.fals = right.fals = fals; - condition.write(settings, definition, adapter); - left.write(settings, definition, adapter); - adapter.goTo(end); - adapter.mark(localfals); - right.write(settings, definition, adapter); - adapter.mark(end); + condition.write(writer); + left.write(writer); + writer.goTo(end); + writer.mark(localfals); + right.write(writer); + writer.mark(end); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java index 7afa88ffc9a..b45e3c33bc2 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Sort; import org.elasticsearch.painless.Variables; @@ -31,57 +30,57 @@ import org.elasticsearch.painless.MethodWriter; */ final class EConstant extends AExpression { - EConstant(final int line, final String location, final Object constant) { - 
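Editor's note: EConditional pushes the surrounding expected type into both branches and, when there is none, asks AnalyzerCaster.promoteConditional for a common type. A rough sketch of the promotion idea, with an invented and much smaller promotion table than the real one.

// Illustrative sketch: promoting the two branches of `cond ? left : right`.
final class ConditionalPromotionSketch {
    static Class<?> promoteConditional(Class<?> left, Class<?> right) {
        if (left == right) return left;
        if (left == double.class || right == double.class) return double.class;
        if (left == float.class  || right == float.class)  return float.class;
        if (left == long.class   || right == long.class)   return long.class;
        if (left == int.class    || right == int.class)    return int.class;
        return Object.class; // fall back to a reference type
    }

    public static void main(String[] args) {
        // `cond ? 1 : 2.0` promotes both branches to double.
        System.out.println(promoteConditional(int.class, double.class)); // double
    }
}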
super(line, location); + EConstant(int line, int offset, String location, Object constant) { + super(line, offset, location); this.constant = constant; } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { if (constant instanceof String) { - actual = definition.stringType; + actual = Definition.STRING_TYPE; } else if (constant instanceof Double) { - actual = definition.doubleType; + actual = Definition.DOUBLE_TYPE; } else if (constant instanceof Float) { - actual = definition.floatType; + actual = Definition.FLOAT_TYPE; } else if (constant instanceof Long) { - actual = definition.longType; + actual = Definition.LONG_TYPE; } else if (constant instanceof Integer) { - actual = definition.intType; + actual = Definition.INT_TYPE; } else if (constant instanceof Character) { - actual = definition.charType; + actual = Definition.CHAR_TYPE; } else if (constant instanceof Short) { - actual = definition.shortType; + actual = Definition.SHORT_TYPE; } else if (constant instanceof Byte) { - actual = definition.byteType; + actual = Definition.BYTE_TYPE; } else if (constant instanceof Boolean) { - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } else { throw new IllegalStateException(error("Illegal tree structure.")); } } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - final Sort sort = actual.sort; + void write(MethodWriter writer) { + Sort sort = actual.sort; switch (sort) { - case STRING: adapter.push((String)constant); break; - case DOUBLE: adapter.push((double)constant); break; - case FLOAT: adapter.push((float)constant); break; - case LONG: adapter.push((long)constant); break; - case INT: adapter.push((int)constant); break; - case CHAR: adapter.push((char)constant); break; - case SHORT: adapter.push((short)constant); break; - case BYTE: adapter.push((byte)constant); break; + case STRING: writer.push((String)constant); break; + case DOUBLE: writer.push((double)constant); break; + case FLOAT: writer.push((float)constant); break; + case LONG: writer.push((long)constant); break; + case INT: writer.push((int)constant); break; + case CHAR: writer.push((char)constant); break; + case SHORT: writer.push((short)constant); break; + case BYTE: writer.push((byte)constant); break; case BOOL: if (tru != null && (boolean)constant) { - adapter.goTo(tru); + writer.goTo(tru); } else if (fals != null && !(boolean)constant) { - adapter.goTo(fals); + writer.goTo(fals); } else if (tru == null && fals == null) { - adapter.push((boolean)constant); + writer.push((boolean)constant); } break; @@ -90,7 +89,7 @@ final class EConstant extends AExpression { } if (sort != Sort.BOOL) { - adapter.writeBranch(tru, fals); + writer.writeBranch(tru, fals); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java index 7583d3eb158..034a4ec97f9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -31,33 +30,33 @@ public final class EDecimal extends 
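Editor's note: EConstant.analyze is a straight instanceof dispatch from the boxed constant to a Painless type. The same dispatch sketched with plain Class tokens standing in for the Definition.*_TYPE constants.

// Illustrative sketch: mapping a boxed constant to its Painless type.
final class ConstantTypeSketch {
    static Class<?> typeOf(Object constant) {
        if (constant instanceof String)    return String.class;
        if (constant instanceof Double)    return double.class;
        if (constant instanceof Float)     return float.class;
        if (constant instanceof Long)      return long.class;
        if (constant instanceof Integer)   return int.class;
        if (constant instanceof Character) return char.class;
        if (constant instanceof Short)     return short.class;
        if (constant instanceof Byte)      return byte.class;
        if (constant instanceof Boolean)   return boolean.class;
        throw new IllegalStateException("Illegal tree structure.");
    }

    public static void main(String[] args) {
        System.out.println(typeOf(42));   // int
        System.out.println(typeOf(42L));  // long
        System.out.println(typeOf("42")); // class java.lang.String
    }
}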
AExpression { final String value; - public EDecimal(final int line, final String location, final String value) { - super(line, location); + public EDecimal(int line, int offset, String location, String value) { + super(line, offset, location); this.value = value; } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { if (value.endsWith("f") || value.endsWith("F")) { try { constant = Float.parseFloat(value.substring(0, value.length() - 1)); - actual = definition.floatType; - } catch (final NumberFormatException exception) { + actual = Definition.FLOAT_TYPE; + } catch (NumberFormatException exception) { throw new IllegalArgumentException(error("Invalid float constant [" + value + "].")); } } else { try { constant = Double.parseDouble(value); - actual = definition.doubleType; - } catch (final NumberFormatException exception) { + actual = Definition.DOUBLE_TYPE; + } catch (NumberFormatException exception) { throw new IllegalArgumentException(error("Invalid double constant [" + value + "].")); } } } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter writer) { throw new IllegalArgumentException(error("Illegal tree structure.")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java index ac0b06c0a79..643dc2d4141 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -32,36 +31,37 @@ public final class EExplicit extends AExpression { final String type; AExpression child; - public EExplicit(final int line, final String location, final String type, final AExpression child) { - super(line, location); + public EExplicit(int line, int offset, String location, String type, AExpression child) { + super(line, offset, location); this.type = type; this.child = child; } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { try { - actual = definition.getType(this.type); - } catch (final IllegalArgumentException exception) { + actual = Definition.getType(this.type); + } catch (IllegalArgumentException exception) { throw new IllegalArgumentException(error("Not a type [" + this.type + "].")); } child.expected = actual; child.explicit = true; - child.analyze(settings, definition, variables); - child = child.cast(settings, definition, variables); + child.analyze(variables); + child = child.cast(variables); } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter writer) { throw new IllegalArgumentException(error("Illegal tree structure.")); } - AExpression cast(final CompilerSettings settings, final Definition definition, final Variables variables) { + AExpression cast(Variables variables) { child.expected = expected; child.explicit = explicit; + child.internal = internal; - return child.cast(settings, definition, variables); + return 
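Editor's note: EDecimal.analyze keys off the literal's suffix — 'f'/'F' parses as a float, everything else as a double — and rethrows NumberFormatException as a friendlier message. A standalone sketch of that logic.

// Illustrative sketch: parsing a Painless decimal literal.
final class DecimalParseSketch {
    static Object parseDecimal(String value) {
        if (value.endsWith("f") || value.endsWith("F")) {
            try {
                return Float.parseFloat(value.substring(0, value.length() - 1));
            } catch (NumberFormatException e) {
                throw new IllegalArgumentException("Invalid float constant [" + value + "].");
            }
        }
        try {
            return Double.parseDouble(value);
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException("Invalid double constant [" + value + "].");
        }
    }

    public static void main(String[] args) {
        System.out.println(parseDecimal("1.5f")); // 1.5 (a Float)
        System.out.println(parseDecimal("1.5"));  // 1.5 (a Double)
    }
}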
child.cast(variables); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java index 0c8500c528b..2bb4f4ba585 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.objectweb.asm.Opcodes; @@ -30,12 +29,12 @@ import org.elasticsearch.painless.MethodWriter; */ public final class ENull extends AExpression { - public ENull(final int line, final String location) { - super(line, location); + public ENull(int line, int offset, String location) { + super(line, offset, location); } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { isNull = true; if (expected != null) { @@ -45,12 +44,12 @@ public final class ENull extends AExpression { actual = expected; } else { - actual = definition.objectType; + actual = Definition.OBJECT_TYPE; } } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - adapter.visitInsn(Opcodes.ACONST_NULL); + void write(MethodWriter writer) { + writer.visitInsn(Opcodes.ACONST_NULL); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java index ed7314b3571..0fed456dd64 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Sort; import org.elasticsearch.painless.Variables; @@ -33,15 +32,15 @@ public final class ENumeric extends AExpression { final String value; int radix; - public ENumeric(final int line, final String location, final String value, final int radix) { - super(line, location); + public ENumeric(int line, int offset, String location, String value, int radix) { + super(line, offset, location); this.value = value; this.radix = radix; } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { if (value.endsWith("d") || value.endsWith("D")) { if (radix != 10) { throw new IllegalStateException(error("Invalid tree structure.")); @@ -49,8 +48,8 @@ public final class ENumeric extends AExpression { try { constant = Double.parseDouble(value.substring(0, value.length() - 1)); - actual = definition.doubleType; - } catch (final NumberFormatException exception) { + actual = Definition.DOUBLE_TYPE; + } catch (NumberFormatException exception) { throw new IllegalArgumentException(error("Invalid double constant [" + value + "].")); } } else if (value.endsWith("f") || value.endsWith("F")) { @@ -60,43 +59,43 @@ public final class ENumeric extends AExpression { try { constant = Float.parseFloat(value.substring(0, value.length() - 1)); - actual = definition.floatType; - } catch (final NumberFormatException exception) { + actual = 
Definition.FLOAT_TYPE; + } catch (NumberFormatException exception) { throw new IllegalArgumentException(error("Invalid float constant [" + value + "].")); } } else if (value.endsWith("l") || value.endsWith("L")) { try { constant = Long.parseLong(value.substring(0, value.length() - 1), radix); - actual = definition.longType; - } catch (final NumberFormatException exception) { + actual = Definition.LONG_TYPE; + } catch (NumberFormatException exception) { throw new IllegalArgumentException(error("Invalid long constant [" + value + "].")); } } else { try { - final Sort sort = expected == null ? Sort.INT : expected.sort; - final int integer = Integer.parseInt(value, radix); + Sort sort = expected == null ? Sort.INT : expected.sort; + int integer = Integer.parseInt(value, radix); if (sort == Sort.BYTE && integer >= Byte.MIN_VALUE && integer <= Byte.MAX_VALUE) { constant = (byte)integer; - actual = definition.byteType; + actual = Definition.BYTE_TYPE; } else if (sort == Sort.CHAR && integer >= Character.MIN_VALUE && integer <= Character.MAX_VALUE) { constant = (char)integer; - actual = definition.charType; + actual = Definition.CHAR_TYPE; } else if (sort == Sort.SHORT && integer >= Short.MIN_VALUE && integer <= Short.MAX_VALUE) { constant = (short)integer; - actual = definition.shortType; + actual = Definition.SHORT_TYPE; } else { constant = integer; - actual = definition.intType; + actual = Definition.INT_TYPE; } - } catch (final NumberFormatException exception) { + } catch (NumberFormatException exception) { throw new IllegalArgumentException(error("Invalid int constant [" + value + "].")); } } } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter writer) { throw new IllegalArgumentException(error("Illegal tree structure.")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java index f4205223a65..8b02522fca9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Sort; import org.elasticsearch.painless.Definition.Type; @@ -31,65 +30,64 @@ import org.elasticsearch.painless.MethodWriter; import static org.elasticsearch.painless.WriterConstants.DEF_NEG_CALL; import static org.elasticsearch.painless.WriterConstants.DEF_NOT_CALL; -import static org.elasticsearch.painless.WriterConstants.NEGATEEXACT_INT; -import static org.elasticsearch.painless.WriterConstants.NEGATEEXACT_LONG; +import static org.elasticsearch.painless.WriterConstants.DEF_UTIL_TYPE; /** * Represents a unary math expression. 
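Editor's note: the integer branch of ENumeric.analyze narrows an int literal to byte, char, or short only when the surrounding context expects that type and the value fits its range. Sketch below; Class tokens again stand in for Definition types.

// Illustrative sketch: context-driven narrowing of an int literal.
final class IntegerNarrowSketch {
    static Object narrow(Class<?> expected, int integer) {
        if (expected == byte.class && integer >= Byte.MIN_VALUE && integer <= Byte.MAX_VALUE) {
            return (byte) integer;
        } else if (expected == char.class && integer >= Character.MIN_VALUE && integer <= Character.MAX_VALUE) {
            return (char) integer;
        } else if (expected == short.class && integer >= Short.MIN_VALUE && integer <= Short.MAX_VALUE) {
            return (short) integer;
        }
        return integer; // stays an int
    }

    public static void main(String[] args) {
        System.out.println(narrow(byte.class, 100));  // 100, narrowed to a Byte
        System.out.println(narrow(byte.class, 1000)); // 1000, stays an Integer
    }
}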
*/ public final class EUnary extends AExpression { - Operation operation; + final Operation operation; AExpression child; - public EUnary(final int line, final String location, final Operation operation, final AExpression child) { - super(line, location); + public EUnary(int line, int offset, String location, Operation operation, AExpression child) { + super(line, offset, location); this.operation = operation; this.child = child; } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { if (operation == Operation.NOT) { - analyzeNot(settings, definition, variables); + analyzeNot(variables); } else if (operation == Operation.BWNOT) { - analyzeBWNot(settings, definition, variables); + analyzeBWNot(variables); } else if (operation == Operation.ADD) { - analyzerAdd(settings, definition, variables); + analyzerAdd(variables); } else if (operation == Operation.SUB) { - analyzerSub(settings, definition, variables); + analyzerSub(variables); } else { throw new IllegalStateException(error("Illegal tree structure.")); } } - void analyzeNot(final CompilerSettings settings, final Definition definition, final Variables variables) { - child.expected = definition.booleanType; - child.analyze(settings, definition, variables); - child = child.cast(settings, definition, variables); + void analyzeNot(Variables variables) { + child.expected = Definition.BOOLEAN_TYPE; + child.analyze(variables); + child = child.cast(variables); if (child.constant != null) { constant = !(boolean)child.constant; } - actual = definition.booleanType; + actual = Definition.BOOLEAN_TYPE; } - void analyzeBWNot(final CompilerSettings settings, final Definition definition, final Variables variables) { - child.analyze(settings, definition, variables); + void analyzeBWNot(Variables variables) { + child.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, child.actual, false, true); + Type promote = AnalyzerCaster.promoteNumeric(child.actual, false); if (promote == null) { throw new ClassCastException(error("Cannot apply not [~] to type [" + child.actual.name + "].")); } child.expected = promote; - child = child.cast(settings, definition, variables); + child = child.cast(variables); if (child.constant != null) { - final Sort sort = promote.sort; + Sort sort = promote.sort; if (sort == Sort.INT) { constant = ~(int)child.constant; @@ -103,20 +101,20 @@ public final class EUnary extends AExpression { actual = promote; } - void analyzerAdd(final CompilerSettings settings, final Definition definition, final Variables variables) { - child.analyze(settings, definition, variables); + void analyzerAdd(Variables variables) { + child.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, child.actual, true, true); + Type promote = AnalyzerCaster.promoteNumeric(child.actual, true); if (promote == null) { throw new ClassCastException(error("Cannot apply positive [+] to type [" + child.actual.name + "].")); } child.expected = promote; - child = child.cast(settings, definition, variables); + child = child.cast(variables); if (child.constant != null) { - final Sort sort = promote.sort; + Sort sort = promote.sort; if (sort == Sort.INT) { constant = +(int)child.constant; @@ -134,27 +132,25 @@ public final class EUnary extends AExpression { actual = promote; } - void analyzerSub(final CompilerSettings settings, final Definition definition, final Variables variables) { - child.analyze(settings, 
definition, variables); + void analyzerSub(Variables variables) { + child.analyze(variables); - final Type promote = AnalyzerCaster.promoteNumeric(definition, child.actual, true, true); + Type promote = AnalyzerCaster.promoteNumeric(child.actual, true); if (promote == null) { throw new ClassCastException(error("Cannot apply negative [-] to type [" + child.actual.name + "].")); } child.expected = promote; - child = child.cast(settings, definition, variables); + child = child.cast(variables); if (child.constant != null) { - final boolean overflow = settings.getNumericOverflow(); - final Sort sort = promote.sort; - + Sort sort = promote.sort; if (sort == Sort.INT) { - constant = overflow ? -(int)child.constant : Math.negateExact((int)child.constant); + constant = -(int)child.constant; } else if (sort == Sort.LONG) { - constant = overflow ? -(long)child.constant : Math.negateExact((long)child.constant); + constant = -(long)child.constant; } else if (sort == Sort.FLOAT) { constant = -(float)child.constant; } else if (sort == Sort.DOUBLE) { @@ -168,66 +164,56 @@ public final class EUnary extends AExpression { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter writer) { if (operation == Operation.NOT) { if (tru == null && fals == null) { - final Label localfals = new Label(); - final Label end = new Label(); + Label localfals = new Label(); + Label end = new Label(); child.fals = localfals; - child.write(settings, definition, adapter); + child.write(writer); - adapter.push(false); - adapter.goTo(end); - adapter.mark(localfals); - adapter.push(true); - adapter.mark(end); + writer.push(false); + writer.goTo(end); + writer.mark(localfals); + writer.push(true); + writer.mark(end); } else { child.tru = fals; child.fals = tru; - child.write(settings, definition, adapter); + child.write(writer); } } else { - final org.objectweb.asm.Type type = actual.type; - final Sort sort = actual.sort; + org.objectweb.asm.Type type = actual.type; + Sort sort = actual.sort; - child.write(settings, definition, adapter); + child.write(writer); if (operation == Operation.BWNOT) { if (sort == Sort.DEF) { - adapter.invokeStatic(definition.defobjType.type, DEF_NOT_CALL); + writer.invokeStatic(DEF_UTIL_TYPE, DEF_NOT_CALL); } else { if (sort == Sort.INT) { - adapter.push(-1); + writer.push(-1); } else if (sort == Sort.LONG) { - adapter.push(-1L); + writer.push(-1L); } else { throw new IllegalStateException(error("Illegal tree structure.")); } - adapter.math(MethodWriter.XOR, type); + writer.math(MethodWriter.XOR, type); } } else if (operation == Operation.SUB) { if (sort == Sort.DEF) { - adapter.invokeStatic(definition.defobjType.type, DEF_NEG_CALL); + writer.invokeStatic(DEF_UTIL_TYPE, DEF_NEG_CALL); } else { - if (settings.getNumericOverflow()) { - adapter.math(MethodWriter.NEG, type); - } else { - if (sort == Sort.INT) { - adapter.invokeStatic(definition.mathType.type, NEGATEEXACT_INT); - } else if (sort == Sort.LONG) { - adapter.invokeStatic(definition.mathType.type, NEGATEEXACT_LONG); - } else { - throw new IllegalStateException(error("Illegal tree structure.")); - } - } + writer.math(MethodWriter.NEG, type); } } else if (operation != Operation.ADD) { throw new IllegalStateException(error("Illegal tree structure.")); } - adapter.writeBranch(tru, fals); + writer.writeBranch(tru, fals); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LArrayLength.java 
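Editor's note on the EUnary hunk: with CompilerSettings gone, the old non-overflow mode that folded negation via Math.negateExact (throwing on overflow) is removed, so constant folding now always uses plain two's-complement negation. The difference in one runnable snippet:

// Illustrative sketch: wrapping negation vs. the removed exact negation.
final class NegationSketch {
    public static void main(String[] args) {
        int min = Integer.MIN_VALUE;
        System.out.println(-min); // -2147483648: plain negation wraps silently
        try {
            Math.negateExact(min); // the old non-overflow mode threw instead
        } catch (ArithmeticException e) {
            System.out.println("negateExact: " + e.getMessage());
        }
    }
}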
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LArrayLength.java index 5803fcfa273..cea5c629b14 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LArrayLength.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LArrayLength.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -31,14 +30,14 @@ public final class LArrayLength extends ALink { final String value; - LArrayLength(final int line, final String location, final String value) { - super(line, location, -1); + LArrayLength(int line, int offset, String location, String value) { + super(line, offset, location, -1); this.value = value; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { if ("length".equals(value)) { if (!load) { throw new IllegalArgumentException(error("Must read array field [length].")); @@ -46,7 +45,7 @@ public final class LArrayLength extends ALink { throw new IllegalArgumentException(error("Cannot write to read-only array field [length].")); } - after = definition.intType; + after = Definition.INT_TYPE; } else { throw new IllegalArgumentException(error("Illegal field access [" + value + "].")); } @@ -55,17 +54,17 @@ public final class LArrayLength extends ALink { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter writer) { // Do nothing. } @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - adapter.arrayLength(); + void load(MethodWriter writer) { + writer.arrayLength(); } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter writer) { throw new IllegalStateException(error("Illegal tree structure.")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LBrace.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LBrace.java index b38826f9e7e..4411913ea07 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LBrace.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LBrace.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Sort; import org.elasticsearch.painless.Variables; @@ -35,52 +34,52 @@ public final class LBrace extends ALink { AExpression index; - public LBrace(final int line, final String location, final AExpression index) { - super(line, location, 2); + public LBrace(int line, int offset, String location, AExpression index) { + super(line, offset, location, 2); this.index = index; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { if (before == null) { - throw new IllegalStateException(error("Illegal tree structure.")); + throw new IllegalArgumentException(error("Illegal array access made without target.")); } final Sort sort = before.sort; if (sort == Sort.ARRAY) { - index.expected = definition.intType; - 
index.analyze(settings, definition, variables); - index = index.cast(settings, definition, variables); + index.expected = Definition.INT_TYPE; + index.analyze(variables); + index = index.cast(variables); - after = definition.getType(before.struct, before.dimensions - 1); + after = Definition.getType(before.struct, before.dimensions - 1); return this; } else if (sort == Sort.DEF) { - return new LDefArray(line, location, index).copy(this).analyze(settings, definition, variables); + return new LDefArray(line, offset, location, index).copy(this).analyze(variables); } else if (Map.class.isAssignableFrom(before.clazz)) { - return new LMapShortcut(line, location, index).copy(this).analyze(settings, definition, variables); + return new LMapShortcut(line, offset, location, index).copy(this).analyze(variables); } else if (List.class.isAssignableFrom(before.clazz)) { - return new LListShortcut(line, location, index).copy(this).analyze(settings, definition, variables); + return new LListShortcut(line, offset, location, index).copy(this).analyze(variables); } throw new IllegalArgumentException(error("Illegal array access on type [" + before.name + "].")); } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - index.write(settings, definition, adapter); + void write(MethodWriter writer) { + index.write(writer); } @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - adapter.arrayLoad(after.type); + void load(MethodWriter writer) { + writer.arrayLoad(after.type); } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - adapter.arrayStore(after.type); + void store(MethodWriter writer) { + writer.arrayStore(after.type); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCall.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCall.java index bd76aa293cc..fdb369468bc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCall.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCall.java @@ -19,9 +19,9 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Method; +import org.elasticsearch.painless.Definition.Sort; import org.elasticsearch.painless.Definition.Struct; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -38,77 +38,78 @@ public final class LCall extends ALink { Method method = null; - public LCall(final int line, final String location, final String name, final List arguments) { - super(line, location, -1); + public LCall(int line, int offset, String location, String name, List arguments) { + super(line, offset, location, -1); this.name = name; this.arguments = arguments; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { if (before == null) { - throw new IllegalStateException(error("Illegal tree structure.")); - } else if (before.sort == Definition.Sort.ARRAY) { + throw new IllegalArgumentException(error("Illegal call [" + name + "] made without target.")); + } else if (before.sort == Sort.ARRAY) { throw new IllegalArgumentException(error("Illegal call [" + name + "] on array type.")); } else if (store) { throw new 
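Editor's note: LBrace.analyze dispatches bracket access on the static type of the target — real arrays, def (dynamic), Map, or List. A sketch of that dispatch, with strings naming the link node each case delegates to and Object standing in for Painless's def type.

// Illustrative sketch: resolving `target[index]` by target type.
import java.util.List;
import java.util.Map;

final class BraceDispatchSketch {
    static String resolve(Class<?> before) {
        if (before.isArray()) {
            return "array load/store";
        } else if (before == Object.class) { // stand-in for def
            return "LDefArray (invokedynamic)";
        } else if (Map.class.isAssignableFrom(before)) {
            return "LMapShortcut (get/put)";
        } else if (List.class.isAssignableFrom(before)) {
            return "LListShortcut (get/set)";
        }
        throw new IllegalArgumentException("Illegal array access on type [" + before.getName() + "].");
    }

    public static void main(String[] args) {
        System.out.println(resolve(int[].class)); // array load/store
        System.out.println(resolve(Map.class));   // LMapShortcut (get/put)
        System.out.println(resolve(List.class));  // LListShortcut (get/set)
    }
}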
IllegalArgumentException(error("Cannot assign a value to a call [" + name + "].")); } Definition.MethodKey methodKey = new Definition.MethodKey(name, arguments.size()); - final Struct struct = before.struct; + Struct struct = before.struct; method = statik ? struct.staticMethods.get(methodKey) : struct.methods.get(methodKey); if (method != null) { for (int argument = 0; argument < arguments.size(); ++argument) { - final AExpression expression = arguments.get(argument); + AExpression expression = arguments.get(argument); expression.expected = method.arguments.get(argument); - expression.analyze(settings, definition, variables); - arguments.set(argument, expression.cast(settings, definition, variables)); + expression.internal = true; + expression.analyze(variables); + arguments.set(argument, expression.cast(variables)); } statement = true; after = method.rtn; return this; - } else if (before.sort == Definition.Sort.DEF) { - final ALink link = new LDefCall(line, location, name, arguments); + } else if (before.sort == Sort.DEF) { + ALink link = new LDefCall(line, offset, location, name, arguments); link.copy(this); - return link.analyze(settings, definition, variables); + return link.analyze(variables); } - throw new IllegalArgumentException(error("Unknown call [" + name + "] with [" + arguments.size() + + throw new IllegalArgumentException(error("Unknown call [" + name + "] with [" + arguments.size() + "] arguments on type [" + struct.name + "].")); } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter writer) { // Do nothing. } @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - for (final AExpression argument : arguments) { - argument.write(settings, definition, adapter); + void load(MethodWriter writer) { + for (AExpression argument : arguments) { + argument.write(writer); } - if (java.lang.reflect.Modifier.isStatic(method.reflect.getModifiers())) { - adapter.invokeStatic(method.owner.type, method.method); + if (java.lang.reflect.Modifier.isStatic(method.modifiers)) { + writer.invokeStatic(method.owner.type, method.method); } else if (java.lang.reflect.Modifier.isInterface(method.owner.clazz.getModifiers())) { - adapter.invokeInterface(method.owner.type, method.method); + writer.invokeInterface(method.owner.type, method.method); } else { - adapter.invokeVirtual(method.owner.type, method.method); + writer.invokeVirtual(method.owner.type, method.method); } if (!method.rtn.clazz.equals(method.handle.type().returnType())) { - adapter.checkCast(method.rtn.type); + writer.checkCast(method.rtn.type); } } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter writer) { throw new IllegalStateException(error("Illegal tree structure.")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCast.java index eb7fb3a6b10..06dee04fca6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCast.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LCast.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Cast; import 
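Editor's note: LCall.load picks the invocation opcode from the resolved method's modifiers and owner. The same decision sketched with plain reflection standing in for Painless's Definition.Method metadata.

// Illustrative sketch: choosing the JVM invocation kind for a resolved call.
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;

final class InvokeKindSketch {
    static String invokeKind(Method method) {
        if (Modifier.isStatic(method.getModifiers())) {
            return "invokestatic";
        } else if (method.getDeclaringClass().isInterface()) {
            return "invokeinterface";
        }
        return "invokevirtual";
    }

    public static void main(String[] args) throws Exception {
        System.out.println(invokeKind(Integer.class.getMethod("parseInt", String.class))); // invokestatic
        System.out.println(invokeKind(CharSequence.class.getMethod("length")));            // invokeinterface
        System.out.println(invokeKind(String.class.getMethod("length")));                  // invokevirtual
    }
}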
org.elasticsearch.painless.AnalyzerCaster; @@ -35,43 +34,43 @@ public final class LCast extends ALink { Cast cast = null; - public LCast(final int line, final String location, final String type) { - super(line, location, -1); + public LCast(int line, int offset, String location, String type) { + super(line, offset, location, -1); this.type = type; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { if (before == null) { - throw new IllegalStateException(error("Illegal tree structure.")); + throw new IllegalStateException(error("Illegal cast without a target.")); } else if (store) { throw new IllegalArgumentException(error("Cannot assign a value to a cast.")); } try { - after = definition.getType(type); - } catch (final IllegalArgumentException exception) { + after = Definition.getType(type); + } catch (IllegalArgumentException exception) { throw new IllegalArgumentException(error("Not a type [" + type + "].")); } - cast = AnalyzerCaster.getLegalCast(definition, location, before, after, true); + cast = AnalyzerCaster.getLegalCast(location, before, after, true, false); return cast != null ? this : null; } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - adapter.writeCast(cast); + void write(MethodWriter writer) { + writer.writeCast(cast); } @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void load(MethodWriter writer) { // Do nothing. } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter writer) { throw new IllegalStateException(error("Illegal tree structure.")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefArray.java index 98b2fbe7bf9..452a0732cdb 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefArray.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.Variables; @@ -35,38 +34,37 @@ final class LDefArray extends ALink implements IDefLink { AExpression index; - LDefArray(final int line, final String location, final AExpression index) { - super(line, location, 2); + LDefArray(int line, int offset, String location, AExpression index) { + super(line, offset, location, 2); this.index = index; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - index.analyze(settings, definition, variables); + ALink analyze(Variables variables) { + index.analyze(variables); index.expected = index.actual; - index = index.cast(settings, definition, variables); + index = index.cast(variables); - after = definition.defType; + after = Definition.DEF_TYPE; return this; } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - index.write(settings, definition, adapter); + void write(MethodWriter writer) { + index.write(writer); } @Override - void load(final CompilerSettings settings, final Definition definition, final 
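Editor's note: a very loose sketch of the legality test behind AnalyzerCaster.getLegalCast in the LCast hunk above — an explicit reference cast is allowed when the two types are related in either direction (upcast or checked downcast) and rejected otherwise. This is a simplification; the real cast table also covers primitives and boxing.

// Illustrative sketch, assuming the simplified rule stated above.
final class LegalCastSketch {
    static boolean legalExplicitCast(Class<?> from, Class<?> to) {
        return to.isAssignableFrom(from) || from.isAssignableFrom(to);
    }

    public static void main(String[] args) {
        System.out.println(legalExplicitCast(CharSequence.class, String.class)); // true: checked downcast
        System.out.println(legalExplicitCast(String.class, Integer.class));      // false: unrelated types
    }
}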
MethodWriter adapter) { - final String desc = Type.getMethodDescriptor(after.type, definition.defType.type, index.actual.type); - adapter.invokeDynamic("arrayLoad", desc, DEF_BOOTSTRAP_HANDLE, DefBootstrap.ARRAY_LOAD); + void load(MethodWriter writer) { + String desc = Type.getMethodDescriptor(after.type, Definition.DEF_TYPE.type, index.actual.type); + writer.invokeDynamic("arrayLoad", desc, DEF_BOOTSTRAP_HANDLE, (Object)DefBootstrap.ARRAY_LOAD); } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - final String desc = Type.getMethodDescriptor(definition.voidType.type, definition.defType.type, - index.actual.type, after.type); - adapter.invokeDynamic("arrayStore", desc, DEF_BOOTSTRAP_HANDLE, DefBootstrap.ARRAY_STORE); + void store(MethodWriter writer) { + String desc = Type.getMethodDescriptor(Definition.VOID_TYPE.type, Definition.DEF_TYPE.type, index.actual.type, after.type); + writer.invokeDynamic("arrayStore", desc, DEF_BOOTSTRAP_HANDLE, (Object)DefBootstrap.ARRAY_STORE); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefCall.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefCall.java index 9dbe65110ee..8ddde1cbed4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefCall.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefCall.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.Variables; @@ -37,58 +36,57 @@ final class LDefCall extends ALink implements IDefLink { final String name; final List arguments; - LDefCall(final int line, final String location, final String name, final List arguments) { - super(line, location, -1); + LDefCall(int line, int offset, String location, String name, List arguments) { + super(line, offset, location, -1); this.name = name; this.arguments = arguments; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { for (int argument = 0; argument < arguments.size(); ++argument) { - final AExpression expression = arguments.get(argument); + AExpression expression = arguments.get(argument); - expression.analyze(settings, definition, variables); + expression.internal = true; + expression.analyze(variables); expression.expected = expression.actual; - arguments.set(argument, expression.cast(settings, definition, variables)); + arguments.set(argument, expression.cast(variables)); } statement = true; - after = definition.defType; + after = Definition.DEF_TYPE; return this; } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter writer) { // Do nothing. 
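Editor's note: the descriptors LDefArray.load/store build above can be reproduced with the same org.objectweb.asm.Type helper the diff uses (ASM must be on the classpath). Object stands in for Painless's def type; int for a statically known index type.

// Illustrative sketch: call-site descriptors for dynamic array access.
import org.objectweb.asm.Type;

final class DefArrayDescriptorSketch {
    public static void main(String[] args) {
        Type def = Type.getType(Object.class);
        Type index = Type.getType(int.class);

        // arrayLoad call site: (receiver, index) -> value
        String load = Type.getMethodDescriptor(def, def, index);
        // arrayStore call site: (receiver, index, value) -> void
        String store = Type.getMethodDescriptor(Type.VOID_TYPE, def, index, def);

        System.out.println(load);  // (Ljava/lang/Object;I)Ljava/lang/Object;
        System.out.println(store); // (Ljava/lang/Object;ILjava/lang/Object;)V
    }
}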
} @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - final StringBuilder signature = new StringBuilder(); + void load(MethodWriter writer) { + StringBuilder signature = new StringBuilder(); signature.append('('); // first parameter is the receiver, we never know its type: always Object - signature.append(definition.defType.type.getDescriptor()); + signature.append(Definition.DEF_TYPE.type.getDescriptor()); - // TODO: remove our explicit conversions and feed more type information for return value, - // it can avoid some unnecessary boxing etc. - for (final AExpression argument : arguments) { + for (AExpression argument : arguments) { signature.append(argument.actual.type.getDescriptor()); - argument.write(settings, definition, adapter); + argument.write(writer); } signature.append(')'); // return value signature.append(after.type.getDescriptor()); - adapter.invokeDynamic(name, signature.toString(), DEF_BOOTSTRAP_HANDLE, DefBootstrap.METHOD_CALL); + writer.invokeDynamic(name, signature.toString(), DEF_BOOTSTRAP_HANDLE, (Object)DefBootstrap.METHOD_CALL); } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter writer) { throw new IllegalStateException(error("Illegal tree structure.")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefField.java index 759b407cb5a..b0a9d4dd0d9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LDefField.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.Variables; @@ -35,34 +34,34 @@ final class LDefField extends ALink implements IDefLink { final String value; - LDefField(final int line, final String location, final String value) { - super(line, location, 1); + LDefField(int line, int offset, String location, String value) { + super(line, offset, location, 1); this.value = value; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - after = definition.defType; + ALink analyze(Variables variables) { + after = Definition.DEF_TYPE; return this; } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter writer) { // Do nothing. 
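Editor's note: LDefCall.load assembles its invokedynamic signature by hand — the receiver is always typed as def/Object since its runtime type is unknown, each argument contributes its own descriptor, and the return type is appended last. A sketch using ASM's descriptor helper (assumed on the classpath).

// Illustrative sketch: building the dynamic call-site signature.
import org.objectweb.asm.Type;

final class DefCallSignatureSketch {
    static String signature(Class<?>[] argumentTypes, Class<?> returnType) {
        StringBuilder signature = new StringBuilder();
        signature.append('(');
        signature.append(Type.getDescriptor(Object.class)); // receiver: always Object
        for (Class<?> argument : argumentTypes) {
            signature.append(Type.getDescriptor(argument));
        }
        signature.append(')');
        signature.append(Type.getDescriptor(returnType));
        return signature.toString();
    }

    public static void main(String[] args) {
        // A def call like `x.add(1)` returning def:
        System.out.println(signature(new Class<?>[] { int.class }, Object.class));
        // (Ljava/lang/Object;I)Ljava/lang/Object;
    }
}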
} @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - final String desc = Type.getMethodDescriptor(after.type, definition.defType.type); - adapter.invokeDynamic(value, desc, DEF_BOOTSTRAP_HANDLE, DefBootstrap.LOAD); + void load(MethodWriter writer) { + String desc = Type.getMethodDescriptor(after.type, Definition.DEF_TYPE.type); + writer.invokeDynamic(value, desc, DEF_BOOTSTRAP_HANDLE, (Object)DefBootstrap.LOAD); } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - final String desc = Type.getMethodDescriptor(definition.voidType.type, definition.defType.type, after.type); - adapter.invokeDynamic(value, desc, DEF_BOOTSTRAP_HANDLE, DefBootstrap.STORE); + void store(MethodWriter writer) { + String desc = Type.getMethodDescriptor(Definition.VOID_TYPE.type, Definition.DEF_TYPE.type, after.type); + writer.invokeDynamic(value, desc, DEF_BOOTSTRAP_HANDLE, (Object)DefBootstrap.STORE); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LField.java index 06f820eba26..ce986413375 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LField.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Field; import org.elasticsearch.painless.Definition.Sort; @@ -39,31 +38,31 @@ public final class LField extends ALink { Field field; - public LField(final int line, final String location, final String value) { - super(line, location, 1); + public LField(int line, int offset, String location, String value) { + super(line, offset, location, 1); this.value = value; } @Override - ALink analyze(CompilerSettings settings, Definition definition, Variables variables) { + ALink analyze(Variables variables) { if (before == null) { - throw new IllegalStateException(error("Illegal tree structure.")); + throw new IllegalArgumentException(error("Illegal field [" + value + "] access made without target.")); } - final Sort sort = before.sort; + Sort sort = before.sort; if (sort == Sort.ARRAY) { - return new LArrayLength(line, location, value).copy(this).analyze(settings, definition, variables); + return new LArrayLength(line, offset, location, value).copy(this).analyze(variables); } else if (sort == Sort.DEF) { - return new LDefField(line, location, value).copy(this).analyze(settings, definition, variables); + return new LDefField(line, offset, location, value).copy(this).analyze(variables); } - final Struct struct = before.struct; + Struct struct = before.struct; field = statik ? struct.staticMembers.get(value) : struct.members.get(value); if (field != null) { - if (store && java.lang.reflect.Modifier.isFinal(field.reflect.getModifiers())) { + if (store && java.lang.reflect.Modifier.isFinal(field.modifiers)) { throw new IllegalArgumentException(error( "Cannot write to read-only field [" + value + "] for type [" + struct.name + "].")); } @@ -72,25 +71,26 @@ public final class LField extends ALink { return this; } else { - // TODO: improve this: the isXXX case seems missing??? 
- final boolean shortcut = - struct.methods.containsKey(new Definition.MethodKey("get" + - Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)) || - struct.methods.containsKey(new Definition.MethodKey("set" + - Character.toUpperCase(value.charAt(0)) + value.substring(1), 1)); + boolean shortcut = + struct.methods.containsKey(new Definition.MethodKey("get" + + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)) || + struct.methods.containsKey(new Definition.MethodKey("is" + + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)) || + struct.methods.containsKey(new Definition.MethodKey("set" + + Character.toUpperCase(value.charAt(0)) + value.substring(1), 1)); if (shortcut) { - return new LShortcut(line, location, value).copy(this).analyze(settings, definition, variables); + return new LShortcut(line, offset, location, value).copy(this).analyze(variables); } else { - final EConstant index = new EConstant(line, location, value); - index.analyze(settings, definition, variables); + EConstant index = new EConstant(line, offset, location, value); + index.analyze(variables); if (Map.class.isAssignableFrom(before.clazz)) { - return new LMapShortcut(line, location, index).copy(this).analyze(settings, definition, variables); + return new LMapShortcut(line, offset, location, index).copy(this).analyze(variables); } - + if (List.class.isAssignableFrom(before.clazz)) { - return new LListShortcut(line, location, index).copy(this).analyze(settings, definition, variables); + return new LListShortcut(line, offset, location, index).copy(this).analyze(variables); } } } @@ -99,33 +99,25 @@ public final class LField extends ALink { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter writer) { // Do nothing. 
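The removed TODO above is resolved by this change: besides getX and setX, LField's shortcut detection now also probes a zero-argument isX getter, so boolean-style accessors are reachable through field syntax. A hedged sketch of the probe (the real code keys on Definition.MethodKey with an arity, not bare strings):

--------------------------------------------------
import java.util.Set;

public final class ShortcutProbe {

    // zeroArg/oneArg stand in for the struct's method table, split by arity.
    static boolean hasShortcut(Set<String> zeroArg, Set<String> oneArg, String value) {
        String suffix = Character.toUpperCase(value.charAt(0)) + value.substring(1);
        return zeroArg.contains("get" + suffix)
            || zeroArg.contains("is" + suffix)   // the newly covered boolean-getter case
            || oneArg.contains("set" + suffix);
    }

    public static void main(String[] args) {
        System.out.println(hasShortcut(Set.of("isEmpty"), Set.of(), "empty")); // true
    }
}
--------------------------------------------------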
} @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - if (java.lang.reflect.Modifier.isStatic(field.reflect.getModifiers())) { - adapter.getStatic(field.owner.type, field.reflect.getName(), field.type.type); - - if (!field.generic.clazz.equals(field.type.clazz)) { - adapter.checkCast(field.generic.type); - } + void load(MethodWriter writer) { + if (java.lang.reflect.Modifier.isStatic(field.modifiers)) { + writer.getStatic(field.owner.type, field.javaName, field.type.type); } else { - adapter.getField(field.owner.type, field.reflect.getName(), field.type.type); - - if (!field.generic.clazz.equals(field.type.clazz)) { - adapter.checkCast(field.generic.type); - } + writer.getField(field.owner.type, field.javaName, field.type.type); } } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - if (java.lang.reflect.Modifier.isStatic(field.reflect.getModifiers())) { - adapter.putStatic(field.owner.type, field.reflect.getName(), field.type.type); + void store(MethodWriter writer) { + if (java.lang.reflect.Modifier.isStatic(field.modifiers)) { + writer.putStatic(field.owner.type, field.javaName, field.type.type); } else { - adapter.putField(field.owner.type, field.reflect.getName(), field.type.type); + writer.putField(field.owner.type, field.javaName, field.type.type); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LListShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LListShortcut.java index 8526ef1297e..f95c24f8592 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LListShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LListShortcut.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Method; import org.elasticsearch.painless.Definition.Sort; @@ -35,14 +34,14 @@ final class LListShortcut extends ALink { Method getter; Method setter; - LListShortcut(final int line, final String location, final AExpression index) { - super(line, location, 2); + LListShortcut(int line, int offset, String location, AExpression index) { + super(line, offset, location, 2); this.index = index; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { getter = before.struct.methods.get(new Definition.MethodKey("get", 1)); setter = before.struct.methods.get(new Definition.MethodKey("set", 2)); @@ -61,9 +60,9 @@ final class LListShortcut extends ALink { } if ((load || store) && (!load || getter != null) && (!store || setter != null)) { - index.expected = definition.intType; - index.analyze(settings, definition, variables); - index = index.cast(settings, definition, variables); + index.expected = Definition.INT_TYPE; + index.analyze(variables); + index = index.cast(variables); after = setter != null ? 
setter.arguments.get(1) : getter.rtn; } else { @@ -74,31 +73,31 @@ final class LListShortcut extends ALink { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - index.write(settings, definition, adapter); + void write(MethodWriter writer) { + index.write(writer); } @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void load(MethodWriter writer) { if (java.lang.reflect.Modifier.isInterface(getter.owner.clazz.getModifiers())) { - adapter.invokeInterface(getter.owner.type, getter.method); + writer.invokeInterface(getter.owner.type, getter.method); } else { - adapter.invokeVirtual(getter.owner.type, getter.method); + writer.invokeVirtual(getter.owner.type, getter.method); } if (!getter.rtn.clazz.equals(getter.handle.type().returnType())) { - adapter.checkCast(getter.rtn.type); + writer.checkCast(getter.rtn.type); } } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter writer) { if (java.lang.reflect.Modifier.isInterface(setter.owner.clazz.getModifiers())) { - adapter.invokeInterface(setter.owner.type, setter.method); + writer.invokeInterface(setter.owner.type, setter.method); } else { - adapter.invokeVirtual(setter.owner.type, setter.method); + writer.invokeVirtual(setter.owner.type, setter.method); } - adapter.writePop(setter.rtn.sort.size); + writer.writePop(setter.rtn.sort.size); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LMapShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LMapShortcut.java index 4efbd4bdf0f..afe93ba1048 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LMapShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LMapShortcut.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Method; import org.elasticsearch.painless.Definition.Sort; @@ -35,14 +34,14 @@ final class LMapShortcut extends ALink { Method getter; Method setter; - LMapShortcut(final int line, final String location, final AExpression index) { - super(line, location, 2); + LMapShortcut(int line, int offset, String location, AExpression index) { + super(line, offset, location, 2); this.index = index; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { getter = before.struct.methods.get(new Definition.MethodKey("get", 1)); setter = before.struct.methods.get(new Definition.MethodKey("put", 2)); @@ -61,8 +60,8 @@ final class LMapShortcut extends ALink { if ((load || store) && (!load || getter != null) && (!store || setter != null)) { index.expected = setter != null ? setter.arguments.get(0) : getter.arguments.get(0); - index.analyze(settings, definition, variables); - index = index.cast(settings, definition, variables); + index.analyze(variables); + index = index.cast(variables); after = setter != null ? 
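The two shortcut links give subscript syntax real method semantics: LListShortcut resolves get(int)/set(int, e) and forces the index to int, while LMapShortcut resolves get/put and types the key from whichever method it found. Plain-Java equivalents of what the compiled chains do (the Painless forms in the comments are illustrative):

--------------------------------------------------
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public final class ShortcutSemantics {

    public static void main(String[] args) {
        List<String> list = new ArrayList<>(List.of("a", "b"));
        String first = list.get(0);   // list[0]       -> get(int), index coerced to int
        list.set(1, "c");             // list[1] = "c" -> set(int, E), setter result popped

        Map<String, Integer> map = new HashMap<>();
        map.put("k", 1);              // map["k"] = 1  -> put(K, V)
        Integer v = map.get("k");     // map["k"]      -> get(K), key typed from the method
        System.out.println(first + " " + v);
    }
}
--------------------------------------------------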
setter.arguments.get(1) : getter.rtn; } else { @@ -73,31 +72,31 @@ } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - index.write(settings, definition, adapter); + void write(MethodWriter writer) { + index.write(writer); } @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void load(MethodWriter writer) { if (java.lang.reflect.Modifier.isInterface(getter.owner.clazz.getModifiers())) { - adapter.invokeInterface(getter.owner.type, getter.method); + writer.invokeInterface(getter.owner.type, getter.method); } else { - adapter.invokeVirtual(getter.owner.type, getter.method); + writer.invokeVirtual(getter.owner.type, getter.method); } if (!getter.rtn.clazz.equals(getter.handle.type().returnType())) { - adapter.checkCast(getter.rtn.type); + writer.checkCast(getter.rtn.type); } } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter writer) { if (java.lang.reflect.Modifier.isInterface(setter.owner.clazz.getModifiers())) { - adapter.invokeInterface(setter.owner.type, setter.method); + writer.invokeInterface(setter.owner.type, setter.method); } else { - adapter.invokeVirtual(setter.owner.type, setter.method); + writer.invokeVirtual(setter.owner.type, setter.method); } - adapter.writePop(setter.rtn.sort.size); + writer.writePop(setter.rtn.sort.size); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewArray.java index 67f9769bd6a..ffe6168f812 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewArray.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.Variables; @@ -35,64 +34,64 @@ public final class LNewArray extends ALink { final String type; final List<AExpression> arguments; - public LNewArray(final int line, final String location, final String type, final List<AExpression> arguments) { - super(line, location, -1); + public LNewArray(int line, int offset, String location, String type, List<AExpression> arguments) { + super(line, offset, location, -1); this.type = type; this.arguments = arguments; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { if (before != null) { - throw new IllegalStateException(error("Illegal tree structure.")); + throw new IllegalArgumentException(error("Cannot create a new array with a target already defined.")); } else if (store) { throw new IllegalArgumentException(error("Cannot assign a value to a new array.")); } else if (!load) { - throw new IllegalArgumentException(error("A newly created array must be assigned.")); + throw new IllegalArgumentException(error("A newly created array must be read.")); } final Type type; try { - type = definition.getType(this.type); - } catch (final IllegalArgumentException exception) { + type = Definition.getType(this.type); + } catch (IllegalArgumentException exception) { throw new IllegalArgumentException(error("Not a type [" + this.type + "].")); } for (int argument = 0; argument
< arguments.size(); ++argument) { - final AExpression expression = arguments.get(argument); + AExpression expression = arguments.get(argument); - expression.expected = definition.intType; - expression.analyze(settings, definition, variables); - arguments.set(argument, expression.cast(settings, definition, variables)); + expression.expected = Definition.INT_TYPE; + expression.analyze(variables); + arguments.set(argument, expression.cast(variables)); } - after = definition.getType(type.struct, arguments.size()); + after = Definition.getType(type.struct, arguments.size()); return this; } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter writer) { // Do nothing. } @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - for (final AExpression argument : arguments) { - argument.write(settings, definition, adapter); + void load(MethodWriter writer) { + for (AExpression argument : arguments) { + argument.write(writer); } if (arguments.size() > 1) { - adapter.visitMultiANewArrayInsn(after.type.getDescriptor(), after.type.getDimensions()); + writer.visitMultiANewArrayInsn(after.type.getDescriptor(), after.type.getDimensions()); } else { - adapter.newArray(definition.getType(after.struct, 0).type); + writer.newArray(Definition.getType(after.struct, 0).type); } } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter writer) { throw new IllegalStateException(error("Illegal tree structure.")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewObj.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewObj.java index 227b63cf31f..dc8b711ee8c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewObj.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LNewObj.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Constructor; import org.elasticsearch.painless.Definition.Struct; @@ -39,17 +38,17 @@ public final class LNewObj extends ALink { Constructor constructor; - public LNewObj(final int line, final String location, final String type, final List<AExpression> arguments) { - super(line, location, -1); + public LNewObj(int line, int offset, String location, String type, List<AExpression> arguments) { + super(line, offset, location, -1); this.type = type; this.arguments = arguments; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { if (before != null) { - throw new IllegalStateException(error("Illegal tree structure")); + throw new IllegalArgumentException(error("Illegal new call with a target already defined.")); } else if (store) { throw new IllegalArgumentException(error("Cannot assign a value to a new call.")); } @@ -57,16 +56,16 @@ public final class LNewObj extends ALink { final Type type; try { - type = definition.getType(this.type); - } catch (final IllegalArgumentException exception) { + type = Definition.getType(this.type); + } catch (IllegalArgumentException exception) { throw new IllegalArgumentException(error("Not a type [" + this.type + "].")); } - final Struct struct = type.struct; + Struct struct =
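LNewArray.analyze coerces every dimension expression to int, and load() picks between two bytecode strategies: a single dimension uses newArray, more than one collapses into a single MULTIANEWARRAY instruction. The same split is visible in ordinary compiled Java:

--------------------------------------------------
public final class NewArrayPaths {

    public static void main(String[] args) {
        int[] one = new int[5];      // single dimension: newarray T_INT
        int[][] two = new int[2][3]; // two dimensions: multianewarray [[I, 2
        System.out.println(one.length + " " + two.length + " " + two[0].length); // 5 2 3
    }
}
--------------------------------------------------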
type.struct; constructor = struct.constructors.get(new Definition.MethodKey("new", arguments.size())); if (constructor != null) { - final Type[] types = new Type[constructor.arguments.size()]; + Type[] types = new Type[constructor.arguments.size()]; constructor.arguments.toArray(types); if (constructor.arguments.size() != arguments.size()) { @@ -75,11 +74,12 @@ public final class LNewObj extends ALink { } for (int argument = 0; argument < arguments.size(); ++argument) { - final AExpression expression = arguments.get(argument); + AExpression expression = arguments.get(argument); expression.expected = types[argument]; - expression.analyze(settings, definition, variables); - arguments.set(argument, expression.cast(settings, definition, variables)); + expression.internal = true; + expression.analyze(variables); + arguments.set(argument, expression.cast(variables)); } statement = true; @@ -92,27 +92,27 @@ public final class LNewObj extends ALink { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter writer) { // Do nothing. } @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - adapter.newInstance(after.type); + void load(MethodWriter writer) { + writer.newInstance(after.type); if (load) { - adapter.dup(); + writer.dup(); } - for (final AExpression argument : arguments) { - argument.write(settings, definition, adapter); + for (AExpression argument : arguments) { + argument.write(writer); } - adapter.invokeConstructor(constructor.owner.type, constructor.method); + writer.invokeConstructor(constructor.owner.type, constructor.method); } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter writer) { throw new IllegalStateException(error("Illegal tree structure.")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LShortcut.java index c65077e6e28..c6aafd47bff 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LShortcut.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Method; import org.elasticsearch.painless.Definition.Sort; @@ -37,17 +36,22 @@ final class LShortcut extends ALink { Method getter = null; Method setter = null; - LShortcut(final int line, final String location, final String value) { - super(line, location, 1); + LShortcut(int line, int offset, String location, String value) { + super(line, offset, location, 1); this.value = value; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - final Struct struct = before.struct; + ALink analyze(Variables variables) { + Struct struct = before.struct; getter = struct.methods.get(new Definition.MethodKey("get" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); + + if (getter == null) { + getter = struct.methods.get(new Definition.MethodKey("is" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); + } + setter = struct.methods.get(new Definition.MethodKey("set" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 
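LNewObj resolves its constructor purely by arity through MethodKey("new", arguments.size()), then casts each argument to the declared parameter type (and now marks the arguments internal before analysis). A hedged reflection sketch of the same lookup idea, not the patch's own code:

--------------------------------------------------
import java.lang.reflect.Constructor;

public final class CtorByArity {

    static Constructor<?> find(Class<?> clazz, int arity) {
        for (Constructor<?> c : clazz.getConstructors()) {
            if (c.getParameterCount() == arity) {
                return c; // first match; the real table is keyed, not scanned
            }
        }
        throw new IllegalArgumentException("Unknown new call with [" + arity + "] arguments.");
    }

    public static void main(String[] args) {
        System.out.println(find(StringBuilder.class, 1)); // e.g. StringBuilder(String)
    }
}
--------------------------------------------------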
1)); if (getter != null && (getter.rtn.sort == Sort.VOID || !getter.arguments.isEmpty())) { @@ -74,31 +78,31 @@ final class LShortcut extends ALink { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter writer) { // Do nothing. } @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void load(MethodWriter writer) { if (java.lang.reflect.Modifier.isInterface(getter.owner.clazz.getModifiers())) { - adapter.invokeInterface(getter.owner.type, getter.method); + writer.invokeInterface(getter.owner.type, getter.method); } else { - adapter.invokeVirtual(getter.owner.type, getter.method); + writer.invokeVirtual(getter.owner.type, getter.method); } if (!getter.rtn.clazz.equals(getter.handle.type().returnType())) { - adapter.checkCast(getter.rtn.type); + writer.checkCast(getter.rtn.type); } } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter writer) { if (java.lang.reflect.Modifier.isInterface(setter.owner.clazz.getModifiers())) { - adapter.invokeInterface(setter.owner.type, setter.method); + writer.invokeInterface(setter.owner.type, setter.method); } else { - adapter.invokeVirtual(setter.owner.type, setter.method); + writer.invokeVirtual(setter.owner.type, setter.method); } - adapter.writePop(setter.rtn.sort.size); + writer.writePop(setter.rtn.sort.size); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LStatic.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LStatic.java new file mode 100644 index 00000000000..8919b75dc27 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LStatic.java @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.node; + +import org.elasticsearch.painless.Definition; +import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.Variables; + +/** + * Represents a static type target. 
+ */ +public final class LStatic extends ALink { + + final String type; + + public LStatic(int line, int offset, String location, String type) { + super(line, offset, location, 0); + + this.type = type; + } + + @Override + ALink analyze(Variables variables) { + if (before != null) { + throw new IllegalArgumentException(error("Illegal static type [" + type + "] after target already defined.")); + } + + try { + after = Definition.getType(type); + statik = true; + } catch (IllegalArgumentException exception) { + throw new IllegalArgumentException(error("Not a type [" + type + "].")); + } + + return this; + } + + @Override + void write(MethodWriter writer) { + throw new IllegalStateException(error("Illegal tree structure.")); + } + + @Override + void load(MethodWriter writer) { + throw new IllegalStateException(error("Illegal tree structure.")); + } + + @Override + void store(MethodWriter writer) { + throw new IllegalStateException(error("Illegal tree structure.")); + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LString.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LString.java index 1d11652f483..446750a22bb 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LString.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LString.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -29,39 +28,39 @@ import org.elasticsearch.painless.MethodWriter; */ public final class LString extends ALink { - public LString(final int line, final String location, final String string) { - super(line, location, -1); + public LString(int line, int offset, String location, String string) { + super(line, offset, location, -1); this.string = string; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { if (before != null) { - throw new IllegalStateException("Illegal tree structure."); + throw new IllegalArgumentException(error("Illegal String constant [" + string + "].")); } else if (store) { throw new IllegalArgumentException(error("Cannot write to read-only String constant [" + string + "].")); } else if (!load) { throw new IllegalArgumentException(error("Must read String constant [" + string + "].")); } - after = definition.stringType; + after = Definition.STRING_TYPE; return this; } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter writer) { // Do nothing. 
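The new LStatic node gives a leading type name its own link in an access chain: analyze() resolves the name, flags the link statik, and refuses to be written, loaded, or stored itself, so whatever follows it resolves against static members. In plain Java terms:

--------------------------------------------------
public final class StaticTargetDemo {

    public static void main(String[] args) {
        int max = Math.max(1, 2); // chain: static target "Math" -> call max/2
        double pi = Math.PI;      // chain: static target "Math" -> static field PI
        System.out.println(max + " " + pi);
    }
}
--------------------------------------------------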
} @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - adapter.push(string); + void load(MethodWriter writer) { + writer.push(string); } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void store(MethodWriter writer) { throw new IllegalStateException(error("Illegal tree structure.")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LVariable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LVariable.java index 35a652f5b84..3a555d93c87 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LVariable.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/LVariable.java @@ -19,13 +19,10 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.Type; +import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.Variables.Variable; import org.objectweb.asm.Opcodes; -import org.elasticsearch.painless.MethodWriter; /** * Represents a variable load/store. @@ -36,55 +33,42 @@ public final class LVariable extends ALink { int slot; - public LVariable(final int line, final String location, final String name) { - super(line, location, 0); + public LVariable(int line, int offset, String location, String name) { + super(line, offset, location, 0); this.name = name; } @Override - ALink analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + ALink analyze(Variables variables) { if (before != null) { - throw new IllegalStateException(error("Illegal tree structure.")); + throw new IllegalArgumentException(error("Illegal variable [" + name + "] access with target already defined.")); } - Type type = null; + Variable variable = variables.getVariable(location, name); - try { - type = definition.getType(name); - } catch (final IllegalArgumentException exception) { - // Do nothing. + if (store && variable.readonly) { + throw new IllegalArgumentException(error("Variable [" + variable.name + "] is read-only.")); } - if (type != null) { - statik = true; - after = type; - } else { - final Variable variable = variables.getVariable(location, name); - - if (store && variable.readonly) { - throw new IllegalArgumentException(error("Variable [" + variable.name + "] is read-only.")); - } - - slot = variable.slot; - after = variable.type; - } + slot = variable.slot; + after = variable.type; return this; } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { + void write(MethodWriter writer) { // Do nothing. 
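LVariable's analyze above drops the old fallback that treated an unknown name as a type (LStatic now owns that case); its load and store below reduce to one visitVarInsn whose opcode adapts to the variable's type. Why that single call works for every type, shown with ASM directly:

--------------------------------------------------
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;

public final class OpcodeAdaptation {

    public static void main(String[] args) {
        // Type.getOpcode shifts a base opcode to the matching typed variant,
        // which is what after.type.getOpcode(Opcodes.ILOAD) relies on.
        System.out.println(Type.INT_TYPE.getOpcode(Opcodes.ILOAD));              // 21 = ILOAD
        System.out.println(Type.LONG_TYPE.getOpcode(Opcodes.ILOAD));             // 22 = LLOAD
        System.out.println(Type.DOUBLE_TYPE.getOpcode(Opcodes.ILOAD));           // 24 = DLOAD
        System.out.println(Type.getType(Object.class).getOpcode(Opcodes.ILOAD)); // 25 = ALOAD
    }
}
--------------------------------------------------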
} @Override - void load(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - adapter.visitVarInsn(after.type.getOpcode(Opcodes.ILOAD), slot); + void load(MethodWriter writer) { + writer.visitVarInsn(after.type.getOpcode(Opcodes.ILOAD), slot); } @Override - void store(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - adapter.visitVarInsn(after.type.getOpcode(Opcodes.ISTORE), slot); + void store(MethodWriter writer) { + writer.visitVarInsn(after.type.getOpcode(Opcodes.ISTORE), slot); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java index 802d5b6c415..0f05243f994 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -34,17 +32,21 @@ public final class SBlock extends AStatement { final List<AStatement> statements; - public SBlock(final int line, final String location, final List<AStatement> statements) { - super(line, location); + public SBlock(int line, int offset, String location, List<AStatement> statements) { + super(line, offset, location); this.statements = Collections.unmodifiableList(statements); } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { + if (statements == null || statements.isEmpty()) { + throw new IllegalArgumentException(error("A block must contain at least one statement.")); + } + final AStatement last = statements.get(statements.size() - 1); - for (final AStatement statement : statements) { + for (AStatement statement : statements) { if (allEscape) { throw new IllegalArgumentException(error("Unreachable statement.")); } @@ -53,7 +55,7 @@ public final class SBlock extends AStatement { statement.lastSource = lastSource && statement == last; statement.lastLoop = (beginLoop || lastLoop) && statement == last; - statement.analyze(settings, definition, variables); + statement.analyze(variables); methodEscape = statement.methodEscape; loopEscape = statement.loopEscape; @@ -65,11 +67,11 @@ public final class SBlock extends AStatement { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - for (final AStatement statement : statements) { + void write(MethodWriter writer) { + for (AStatement statement : statements) { statement.continu = continu; statement.brake = brake; - statement.write(settings, definition, adapter); + statement.write(writer); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBreak.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBreak.java index 3998b1a9ff2..727cd4b929c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBreak.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBreak.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import
org.elasticsearch.painless.MethodWriter; @@ -29,12 +27,12 @@ import org.elasticsearch.painless.MethodWriter; */ public final class SBreak extends AStatement { - public SBreak(final int line, final String location) { - super(line, location); + public SBreak(int line, int offset, String location) { + super(line, offset, location); } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { if (!inLoop) { throw new IllegalArgumentException(error("Break statement outside of a loop.")); } @@ -46,8 +44,9 @@ public final class SBreak extends AStatement { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - writeDebugInfo(adapter); - adapter.goTo(brake); + void write(MethodWriter writer) { + writeDebugInfo(writer); + + writer.goTo(brake); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STrap.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java similarity index 67% rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STrap.java rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java index acb50a2b962..b3b32d741cd 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STrap.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java @@ -19,8 +19,8 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; +import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.Variables.Variable; import org.objectweb.asm.Label; @@ -30,11 +30,11 @@ import org.elasticsearch.painless.MethodWriter; /** * Represents a catch block as part of a try-catch block. 
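The STrap to SCatch rename comes with a reordering of its checks, visible just below: the declared type is resolved first, rejected unless it is an Exception subtype, and only then is the catch variable registered. A self-contained sketch of that validation step (names are illustrative stand-ins):

--------------------------------------------------
public final class CatchTypeCheck {

    static void check(Class<?> clazz, String typeName) {
        if (!Exception.class.isAssignableFrom(clazz)) {
            throw new ClassCastException("Not an exception type [" + typeName + "].");
        }
    }

    public static void main(String[] args) {
        check(IllegalArgumentException.class, "IllegalArgumentException"); // passes
        try {
            check(String.class, "String"); // rejected before any variable is declared
        } catch (ClassCastException expected) {
            System.out.println(expected.getMessage());
        }
    }
}
--------------------------------------------------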
*/ -public final class STrap extends AStatement { +public final class SCatch extends AStatement { final String type; final String name; - final AStatement block; + final SBlock block; Variable variable; @@ -42,8 +42,8 @@ public final class STrap extends AStatement { Label end; Label exception; - public STrap(final int line, final String location, final String type, final String name, final AStatement block) { - super(line, location); + public SCatch(int line, int offset, String location, String type, String name, SBlock block) { + super(line, offset, location); this.type = type; this.name = name; @@ -51,19 +51,27 @@ public final class STrap extends AStatement { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - variable = variables.addVariable(location, type, name, true, false); + void analyze(Variables variables) { + final Type type; - if (!Exception.class.isAssignableFrom(variable.type.clazz)) { - throw new ClassCastException(error("Not an exception type [" + variable.type.name + "].")); + try { + type = Definition.getType(this.type); + } catch (IllegalArgumentException exception) { + throw new IllegalArgumentException(error("Not a type [" + this.type + "].")); } + if (!Exception.class.isAssignableFrom(type.clazz)) { + throw new ClassCastException(error("Not an exception type [" + this.type + "].")); + } + + variable = variables.addVariable(location, type, name, true, false); + if (block != null) { block.lastSource = lastSource; block.inLoop = inLoop; block.lastLoop = lastLoop; - block.analyze(settings, definition, variables); + block.analyze(variables); methodEscape = block.methodEscape; loopEscape = block.loopEscape; @@ -75,23 +83,24 @@ public final class STrap extends AStatement { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - writeDebugInfo(adapter); - final Label jump = new Label(); + void write(MethodWriter writer) { + writeDebugInfo(writer); - adapter.mark(jump); - adapter.visitVarInsn(variable.type.type.getOpcode(Opcodes.ISTORE), variable.slot); + Label jump = new Label(); + + writer.mark(jump); + writer.visitVarInsn(variable.type.type.getOpcode(Opcodes.ISTORE), variable.slot); if (block != null) { block.continu = continu; block.brake = brake; - block.write(settings, definition, adapter); + block.write(writer); } - adapter.visitTryCatchBlock(begin, end, jump, variable.type.type.getInternalName()); + writer.visitTryCatchBlock(begin, end, jump, variable.type.type.getInternalName()); if (exception != null && !block.allEscape) { - adapter.goTo(exception); + writer.goTo(exception); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SContinue.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SContinue.java index 2c4e33b6326..bfea104f23d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SContinue.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SContinue.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -29,12 +27,12 @@ import org.elasticsearch.painless.MethodWriter; */ public final class SContinue extends AStatement { - public SContinue(final int line, final String location) { - super(line, location); + public 
SContinue(int line, int offset, String location) { + super(line, offset, location); } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { if (!inLoop) { throw new IllegalArgumentException(error("Continue statement outside of a loop.")); } @@ -49,8 +47,9 @@ public final class SContinue extends AStatement { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - writeDebugInfo(adapter); - adapter.goTo(continu); + void write(MethodWriter writer) { + writeDebugInfo(writer); + + writer.goTo(continu); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclBlock.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclBlock.java index 5494ef9c32a..ddfe54c22eb 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclBlock.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclBlock.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -34,25 +32,25 @@ public final class SDeclBlock extends AStatement { final List<SDeclaration> declarations; - public SDeclBlock(final int line, final String location, final List<SDeclaration> declarations) { - super(line, location); + public SDeclBlock(int line, int offset, String location, List<SDeclaration> declarations) { + super(line, offset, location); this.declarations = Collections.unmodifiableList(declarations); } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - for (final SDeclaration declaration : declarations) { - declaration.analyze(settings, definition, variables); + void analyze(Variables variables) { + for (SDeclaration declaration : declarations) { + declaration.analyze(variables); } statementCount = declarations.size(); } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - for (final SDeclaration declaration : declarations) { - declaration.write(settings, definition, adapter); + void write(MethodWriter writer) { + for (SDeclaration declaration : declarations) { + declaration.write(writer); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java index 189ddc95936..246fb992836 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java @@ -19,9 +19,8 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.Sort; +import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.Variables.Variable; import org.objectweb.asm.Opcodes; @@ -38,8 +37,8 @@ public final class SDeclaration extends AStatement { Variable variable; - public SDeclaration(final int line, final String location, final String type, final String name, final AExpression expression) { - super(line, location); + public SDeclaration(int line, int offset, String
location, String type, String name, AExpression expression) { + super(line, offset, location); this.type = type; this.name = name; @@ -47,41 +46,45 @@ public final class SDeclaration extends AStatement { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - variable = variables.addVariable(location, type, name, false, false); + void analyze(Variables variables) { + final Type type; + + try { + type = Definition.getType(this.type); + } catch (IllegalArgumentException exception) { + throw new IllegalArgumentException(error("Not a type [" + this.type + "].")); + } if (expression != null) { - expression.expected = variable.type; - expression.analyze(settings, definition, variables); - expression = expression.cast(settings, definition, variables); + expression.expected = type; + expression.analyze(variables); + expression = expression.cast(variables); } + + variable = variables.addVariable(location, type, name, false, false); } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - writeDebugInfo(adapter); - final org.objectweb.asm.Type type = variable.type.type; - final Sort sort = variable.type.sort; + void write(MethodWriter writer) { + writeDebugInfo(writer); - final boolean initialize = expression == null; - - if (!initialize) { - expression.write(settings, definition, adapter); + if (expression == null) { + switch (variable.type.sort) { + case VOID: throw new IllegalStateException(error("Illegal tree structure.")); + case BOOL: + case BYTE: + case SHORT: + case CHAR: + case INT: writer.push(0); break; + case LONG: writer.push(0L); break; + case FLOAT: writer.push(0.0F); break; + case DOUBLE: writer.push(0.0); break; + default: writer.visitInsn(Opcodes.ACONST_NULL); + } + } else { + expression.write(writer); } - switch (sort) { - case VOID: throw new IllegalStateException(error("Illegal tree structure.")); - case BOOL: - case BYTE: - case SHORT: - case CHAR: - case INT: if (initialize) adapter.push(0); break; - case LONG: if (initialize) adapter.push(0L); break; - case FLOAT: if (initialize) adapter.push(0.0F); break; - case DOUBLE: if (initialize) adapter.push(0.0); break; - default: if (initialize) adapter.visitInsn(Opcodes.ACONST_NULL); - } - - adapter.visitVarInsn(type.getOpcode(Opcodes.ISTORE), variable.slot); + writer.visitVarInsn(variable.type.type.getOpcode(Opcodes.ISTORE), variable.slot); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java index 63d3df4bcee..3fbc3b9fafc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.objectweb.asm.Label; @@ -30,32 +29,38 @@ import org.elasticsearch.painless.MethodWriter; */ public final class SDo extends AStatement { - final AStatement block; + final int maxLoopCounter; + final SBlock block; AExpression condition; - public SDo(final int line, final String location, final AStatement block, final AExpression condition) { - super(line, location); + public SDo(int line, int offset, String location, int maxLoopCounter, SBlock block, AExpression condition) { + super(line, 
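SDeclaration's rewrite above does two things: the initializer is analyzed before the variable is registered, presumably so a declaration can no longer reference itself, and write() now pushes an explicit default when no initializer exists. Those defaults mirror Java's own zero values:

--------------------------------------------------
public final class DeclarationDefaults {

    public static void main(String[] args) {
        int i = 0;        // BOOL/BYTE/SHORT/CHAR/INT: push(0)
        long l = 0L;      // LONG: push(0L)
        float f = 0.0F;   // FLOAT: push(0.0F)
        double d = 0.0;   // DOUBLE: push(0.0)
        Object o = null;  // reference types: ACONST_NULL
        System.out.println(i + " " + l + " " + f + " " + d + " " + o);
    }
}
--------------------------------------------------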
offset, location); this.condition = condition; this.block = block; + this.maxLoopCounter = maxLoopCounter; } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { variables.incrementScope(); + if (block == null) { + throw new IllegalArgumentException(error("Extraneous do while loop.")); + } + block.beginLoop = true; block.inLoop = true; - block.analyze(settings, definition, variables); + block.analyze(variables); if (block.loopEscape && !block.anyContinue) { throw new IllegalArgumentException(error("Extraneous do while loop.")); } - condition.expected = definition.booleanType; - condition.analyze(settings, definition, variables); - condition = condition.cast(settings, definition, variables); + condition.expected = Definition.BOOLEAN_TYPE; + condition.analyze(variables); + condition = condition.cast(variables); if (condition.constant != null) { final boolean continuous = (boolean)condition.constant; @@ -72,7 +77,7 @@ public final class SDo extends AStatement { statementCount = 1; - if (settings.getMaxLoopCounter() > 0) { + if (maxLoopCounter > 0) { loopCounterSlot = variables.getVariable(location, "#loop").slot; } @@ -80,26 +85,27 @@ public final class SDo extends AStatement { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - writeDebugInfo(adapter); - final Label start = new Label(); - final Label begin = new Label(); - final Label end = new Label(); + void write(MethodWriter writer) { + writeDebugInfo(writer); - adapter.mark(start); + Label start = new Label(); + Label begin = new Label(); + Label end = new Label(); + + writer.mark(start); block.continu = begin; block.brake = end; - block.write(settings, definition, adapter); + block.write(writer); - adapter.mark(begin); + writer.mark(begin); condition.fals = end; - condition.write(settings, definition, adapter); + condition.write(writer); - adapter.writeLoopCounter(loopCounterSlot, Math.max(1, block.statementCount)); + writer.writeLoopCounter(loopCounterSlot, Math.max(1, block.statementCount)); - adapter.goTo(start); - adapter.mark(end); + writer.goTo(start); + writer.mark(end); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java index dd7cffaa970..a9f47fb6522 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Sort; import org.elasticsearch.painless.Variables; @@ -32,16 +31,16 @@ public final class SExpression extends AStatement { AExpression expression; - public SExpression(final int line, final String location, final AExpression expression) { - super(line, location); + public SExpression(int line, int offset, String location, AExpression expression) { + super(line, offset, location); this.expression = expression; } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { expression.read = lastSource; - expression.analyze(settings, definition, variables); + expression.analyze(variables); if (!lastSource && 
!expression.statement) { throw new IllegalArgumentException(error("Not a statement.")); @@ -49,8 +48,9 @@ public final class SExpression extends AStatement { final boolean rtn = lastSource && expression.actual.sort != Sort.VOID; - expression.expected = rtn ? definition.objectType : expression.actual; - expression = expression.cast(settings, definition, variables); + expression.expected = rtn ? Definition.OBJECT_TYPE : expression.actual; + expression.internal = rtn; + expression = expression.cast(variables); methodEscape = rtn; loopEscape = rtn; @@ -59,14 +59,15 @@ public final class SExpression extends AStatement { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - writeDebugInfo(adapter); - expression.write(settings, definition, adapter); + void write(MethodWriter writer) { + writeDebugInfo(writer); + + expression.write(writer); if (methodEscape) { - adapter.returnValue(); + writer.returnValue(); } else { - adapter.writePop(expression.expected.sort.size); + writer.writePop(expression.expected.sort.size); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java index 43c978b47fa..5fb1845e551 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.objectweb.asm.Label; @@ -30,35 +29,37 @@ import org.elasticsearch.painless.MethodWriter; */ public final class SFor extends AStatement { + final int maxLoopCounter; ANode initializer; AExpression condition; AExpression afterthought; - final AStatement block; + final SBlock block; - public SFor(final int line, final String location, - final ANode initializer, final AExpression condition, final AExpression afterthought, final AStatement block) { - super(line, location); + public SFor(int line, int offset, String location, int maxLoopCounter, + ANode initializer, AExpression condition, AExpression afterthought, SBlock block) { + super(line, offset, location); this.initializer = initializer; this.condition = condition; this.afterthought = afterthought; this.block = block; + this.maxLoopCounter = maxLoopCounter; } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { variables.incrementScope(); boolean continuous = false; if (initializer != null) { if (initializer instanceof SDeclBlock) { - ((SDeclBlock)initializer).analyze(settings, definition, variables); + ((SDeclBlock)initializer).analyze(variables); } else if (initializer instanceof AExpression) { - final AExpression initializer = (AExpression)this.initializer; + AExpression initializer = (AExpression)this.initializer; initializer.read = false; - initializer.analyze(settings, definition, variables); + initializer.analyze(variables); if (!initializer.statement) { throw new IllegalArgumentException(initializer.error("Not a statement.")); @@ -69,10 +70,9 @@ public final class SFor extends AStatement { } if (condition != null) { - - condition.expected = definition.booleanType; - condition.analyze(settings, definition, variables); - condition = condition.cast(settings, definition, variables); 
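SDo and SFor now receive maxLoopCounter as a constructor argument instead of consulting CompilerSettings during analysis; when it is positive, the hidden #loop variable's slot is captured and writeLoopCounter charges every iteration by the body's statement count. A hedged sketch of the runtime behavior that guard produces (the budget size and error text here are invented for illustration):

--------------------------------------------------
public final class LoopBudget {

    public static void main(String[] args) {
        int counter = 10;     // stand-in for the hidden #loop slot
        int perIteration = 3; // Math.max(1, block.statementCount)
        try {
            for (int i = 0; i < 100; ++i) {
                counter -= perIteration;
                if (counter <= 0) {
                    throw new IllegalStateException("loop budget exhausted at iteration " + i);
                }
            }
        } catch (IllegalStateException e) {
            System.out.println(e.getMessage()); // trips long before 100 iterations
        }
    }
}
--------------------------------------------------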
+ condition.expected = Definition.BOOLEAN_TYPE; + condition.analyze(variables); + condition = condition.cast(variables); if (condition.constant != null) { continuous = (boolean)condition.constant; @@ -91,20 +91,18 @@ public final class SFor extends AStatement { if (afterthought != null) { afterthought.read = false; - afterthought.analyze(settings, definition, variables); + afterthought.analyze(variables); if (!afterthought.statement) { throw new IllegalArgumentException(afterthought.error("Not a statement.")); } } - int count = 1; - if (block != null) { block.beginLoop = true; block.inLoop = true; - block.analyze(settings, definition, variables); + block.analyze(variables); if (block.loopEscape && !block.anyContinue) { throw new IllegalArgumentException(error("Extraneous for loop.")); @@ -115,12 +113,12 @@ public final class SFor extends AStatement { allEscape = true; } - block.statementCount = Math.max(count, block.statementCount); + block.statementCount = Math.max(1, block.statementCount); } statementCount = 1; - if (settings.getMaxLoopCounter() > 0) { + if (maxLoopCounter > 0) { loopCounterSlot = variables.getVariable(location, "#loop").slot; } @@ -128,26 +126,27 @@ public final class SFor extends AStatement { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - writeDebugInfo(adapter); - final Label start = new Label(); - final Label begin = afterthought == null ? start : new Label(); - final Label end = new Label(); + void write(MethodWriter writer) { + writeDebugInfo(writer); + + Label start = new Label(); + Label begin = afterthought == null ? start : new Label(); + Label end = new Label(); if (initializer instanceof SDeclBlock) { - ((SDeclBlock)initializer).write(settings, definition, adapter); + ((SDeclBlock)initializer).write(writer); } else if (initializer instanceof AExpression) { AExpression initializer = (AExpression)this.initializer; - initializer.write(settings, definition, adapter); - adapter.writePop(initializer.expected.sort.size); + initializer.write(writer); + writer.writePop(initializer.expected.sort.size); } - adapter.mark(start); + writer.mark(start); if (condition != null) { condition.fals = end; - condition.write(settings, definition, adapter); + condition.write(writer); } boolean allEscape = false; @@ -161,21 +160,21 @@ public final class SFor extends AStatement { ++statementCount; } - adapter.writeLoopCounter(loopCounterSlot, statementCount); - block.write(settings, definition, adapter); + writer.writeLoopCounter(loopCounterSlot, statementCount); + block.write(writer); } else { - adapter.writeLoopCounter(loopCounterSlot, 1); + writer.writeLoopCounter(loopCounterSlot, 1); } if (afterthought != null) { - adapter.mark(begin); - afterthought.write(settings, definition, adapter); + writer.mark(begin); + afterthought.write(writer); } if (afterthought != null || !allEscape) { - adapter.goTo(start); + writer.goTo(start); } - adapter.mark(end); + writer.mark(end); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIf.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIf.java new file mode 100644 index 00000000000..180b5023811 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIf.java @@ -0,0 +1,84 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.node; + +import org.elasticsearch.painless.Definition; +import org.elasticsearch.painless.Variables; +import org.objectweb.asm.Label; +import org.elasticsearch.painless.MethodWriter; + +/** + * Represents an if block. + */ +public final class SIf extends AStatement { + + AExpression condition; + final SBlock ifblock; + + public SIf(int line, int offset, String location, AExpression condition, SBlock ifblock) { + super(line, offset, location); + + this.condition = condition; + this.ifblock = ifblock; + } + + @Override + void analyze(Variables variables) { + condition.expected = Definition.BOOLEAN_TYPE; + condition.analyze(variables); + condition = condition.cast(variables); + + if (condition.constant != null) { + throw new IllegalArgumentException(error("Extraneous if statement.")); + } + + if (ifblock == null) { + throw new IllegalArgumentException(error("Extraneous if statement.")); + } + + ifblock.lastSource = lastSource; + ifblock.inLoop = inLoop; + ifblock.lastLoop = lastLoop; + + variables.incrementScope(); + ifblock.analyze(variables); + variables.decrementScope(); + + anyContinue = ifblock.anyContinue; + anyBreak = ifblock.anyBreak; + statementCount = ifblock.statementCount; + } + + @Override + void write(MethodWriter writer) { + writeDebugInfo(writer); + + Label fals = new Label(); + + condition.fals = fals; + condition.write(writer); + + ifblock.continu = continu; + ifblock.brake = brake; + ifblock.write(writer); + + writer.mark(fals); + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java index 698d8c8126c..217584f32ab 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.objectweb.asm.Label; @@ -31,12 +30,11 @@ import org.elasticsearch.painless.MethodWriter; public final class SIfElse extends AStatement { AExpression condition; - final AStatement ifblock; - final AStatement elseblock; + final SBlock ifblock; + final SBlock elseblock; - public SIfElse(final int line, final String location, - final AExpression condition, final AStatement ifblock, final AStatement elseblock) { - super(line, location); + public SIfElse(int line, int offset, String location, AExpression condition, SBlock ifblock, SBlock elseblock) { + super(line, offset, location); this.condition = condition; this.ifblock = ifblock; @@ -44,70 +42,75 @@ public final class SIfElse extends AStatement { } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - 
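The new SIf node above, and the slimmed SIfElse that now requires both blocks, share one label scheme: the condition writes a jump to a false-target, then the if block runs and (unless it always escapes) jumps over the else block to the end. Spelled out as plain control flow:

--------------------------------------------------
// Label shape emitted by SIf/SIfElse, as comments over the Java equivalent:
//   condition, jump-if-false -> fals
//   ifblock; goto end        (goto omitted when the if block always escapes)
//   fals: elseblock          (SIf marks fals directly and has no else block)
//   end:
public final class BranchShape {

    public static void main(String[] args) {
        boolean condition = false;
        if (condition) {
            System.out.println("ifblock");   // would fall through to end
        } else {
            System.out.println("elseblock"); // reached via the fals label
        }
    }
}
--------------------------------------------------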
condition.expected = definition.booleanType; - condition.analyze(settings, definition, variables); - condition = condition.cast(settings, definition, variables); + void analyze(Variables variables) { + condition.expected = Definition.BOOLEAN_TYPE; + condition.analyze(variables); + condition = condition.cast(variables); if (condition.constant != null) { throw new IllegalArgumentException(error("Extraneous if statement.")); } + if (ifblock == null) { + throw new IllegalArgumentException(error("Extraneous if statement.")); + } + ifblock.lastSource = lastSource; ifblock.inLoop = inLoop; ifblock.lastLoop = lastLoop; variables.incrementScope(); - ifblock.analyze(settings, definition, variables); + ifblock.analyze(variables); variables.decrementScope(); anyContinue = ifblock.anyContinue; anyBreak = ifblock.anyBreak; statementCount = ifblock.statementCount; - if (elseblock != null) { - elseblock.lastSource = lastSource; - elseblock.inLoop = inLoop; - elseblock.lastLoop = lastLoop; - - variables.incrementScope(); - elseblock.analyze(settings, definition, variables); - variables.decrementScope(); - - methodEscape = ifblock.methodEscape && elseblock.methodEscape; - loopEscape = ifblock.loopEscape && elseblock.loopEscape; - allEscape = ifblock.allEscape && elseblock.allEscape; - anyContinue |= elseblock.anyContinue; - anyBreak |= elseblock.anyBreak; - statementCount = Math.max(ifblock.statementCount, elseblock.statementCount); + if (elseblock == null) { + throw new IllegalArgumentException(error("Extraneous else statement.")); } + + elseblock.lastSource = lastSource; + elseblock.inLoop = inLoop; + elseblock.lastLoop = lastLoop; + + variables.incrementScope(); + elseblock.analyze(variables); + variables.decrementScope(); + + methodEscape = ifblock.methodEscape && elseblock.methodEscape; + loopEscape = ifblock.loopEscape && elseblock.loopEscape; + allEscape = ifblock.allEscape && elseblock.allEscape; + anyContinue |= elseblock.anyContinue; + anyBreak |= elseblock.anyBreak; + statementCount = Math.max(ifblock.statementCount, elseblock.statementCount); } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - writeDebugInfo(adapter); - final Label end = new Label(); - final Label fals = elseblock != null ? new Label() : end; + void write(MethodWriter writer) { + writeDebugInfo(writer); + + Label end = new Label(); + Label fals = elseblock != null ? 
new Label() : end; condition.fals = fals; - condition.write(settings, definition, adapter); + condition.write(writer); ifblock.continu = continu; ifblock.brake = brake; - ifblock.write(settings, definition, adapter); + ifblock.write(writer); - if (elseblock != null) { - if (!ifblock.allEscape) { - adapter.goTo(end); - } - - adapter.mark(fals); - - elseblock.continu = continu; - elseblock.brake = brake; - elseblock.write(settings, definition, adapter); + if (!ifblock.allEscape) { + writer.goTo(end); } - adapter.mark(end); + writer.mark(fals); + + elseblock.continu = continu; + elseblock.brake = brake; + elseblock.write(writer); + + writer.mark(end); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SReturn.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SReturn.java index b959b47a96b..2f342a5b7b9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SReturn.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SReturn.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -31,17 +30,18 @@ public final class SReturn extends AStatement { AExpression expression; - public SReturn(final int line, final String location, final AExpression expression) { - super(line, location); + public SReturn(int line, int offset, String location, AExpression expression) { + super(line, offset, location); this.expression = expression; } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - expression.expected = definition.objectType; - expression.analyze(settings, definition, variables); - expression = expression.cast(settings, definition, variables); + void analyze(Variables variables) { + expression.expected = Definition.OBJECT_TYPE; + expression.internal = true; + expression.analyze(variables); + expression = expression.cast(variables); methodEscape = true; loopEscape = true; @@ -51,9 +51,10 @@ public final class SReturn extends AStatement { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - writeDebugInfo(adapter); - expression.write(settings, definition, adapter); - adapter.returnValue(); + void write(MethodWriter writer) { + writeDebugInfo(writer); + + expression.write(writer); + writer.returnValue(); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java index e5a80802ce8..899d54225dc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.objectweb.asm.Opcodes; import org.elasticsearch.painless.MethodWriter; @@ -35,25 +33,29 @@ public final class SSource extends AStatement { final List statements; - public SSource(final int line, final String location, final List statements) { - super(line, location); + public SSource(int line, int offset, String location, List statements) { + super(line, offset, location); 
this.statements = Collections.unmodifiableList(statements); } @Override - public void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + public void analyze(Variables variables) { + if (statements == null || statements.isEmpty()) { + throw new IllegalArgumentException(error("Cannot generate an empty script.")); + } + variables.incrementScope(); final AStatement last = statements.get(statements.size() - 1); - for (final AStatement statement : statements) { + for (AStatement statement : statements) { if (allEscape) { throw new IllegalArgumentException(error("Unreachable statement.")); } statement.lastSource = statement == last; - statement.analyze(settings, definition, variables); + statement.analyze(variables); methodEscape = statement.methodEscape; allEscape = statement.allEscape; @@ -63,14 +65,14 @@ public final class SSource extends AStatement { } @Override - public void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - for (final AStatement statement : statements) { - statement.write(settings, definition, adapter); + public void write(MethodWriter writer) { + for (AStatement statement : statements) { + statement.write(writer); } if (!methodEscape) { - adapter.visitInsn(Opcodes.ACONST_NULL); - adapter.returnValue(); + writer.visitInsn(Opcodes.ACONST_NULL); + writer.returnValue(); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SThrow.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SThrow.java index eac039b998e..d002f1f9dad 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SThrow.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SThrow.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.elasticsearch.painless.MethodWriter; @@ -31,17 +30,17 @@ public final class SThrow extends AStatement { AExpression expression; - public SThrow(final int line, final String location, final AExpression expression) { - super(line, location); + public SThrow(int line, int offset, String location, AExpression expression) { + super(line, offset, location); this.expression = expression; } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { - expression.expected = definition.exceptionType; - expression.analyze(settings, definition, variables); - expression = expression.cast(settings, definition, variables); + void analyze(Variables variables) { + expression.expected = Definition.EXCEPTION_TYPE; + expression.analyze(variables); + expression = expression.cast(variables); methodEscape = true; loopEscape = true; @@ -50,9 +49,10 @@ public final class SThrow extends AStatement { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - writeDebugInfo(adapter); - expression.write(settings, definition, adapter); - adapter.throwException(); + void write(MethodWriter writer) { + writeDebugInfo(writer); + + expression.write(writer); + writer.throwException(); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STry.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STry.java index 0329826b02a..a4ef00ef146 100644 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STry.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STry.java @@ -19,8 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.objectweb.asm.Label; import org.elasticsearch.painless.MethodWriter; @@ -33,24 +31,28 @@ import java.util.List; */ public final class STry extends AStatement { - final AStatement block; - final List traps; + final SBlock block; + final List catches; - public STry(final int line, final String location, final AStatement block, final List traps) { - super(line, location); + public STry(int line, int offset, String location, SBlock block, List traps) { + super(line, offset, location); this.block = block; - this.traps = Collections.unmodifiableList(traps); + this.catches = Collections.unmodifiableList(traps); } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { + if (block == null) { + throw new IllegalArgumentException(error("Extraneous try statement.")); + } + block.lastSource = lastSource; block.inLoop = inLoop; block.lastLoop = lastLoop; variables.incrementScope(); - block.analyze(settings, definition, variables); + block.analyze(variables); variables.decrementScope(); methodEscape = block.methodEscape; @@ -61,55 +63,56 @@ public final class STry extends AStatement { int statementCount = 0; - for (final STrap trap : traps) { - trap.lastSource = lastSource; - trap.inLoop = inLoop; - trap.lastLoop = lastLoop; + for (SCatch catc : catches) { + catc.lastSource = lastSource; + catc.inLoop = inLoop; + catc.lastLoop = lastLoop; variables.incrementScope(); - trap.analyze(settings, definition, variables); + catc.analyze(variables); variables.decrementScope(); - methodEscape &= trap.methodEscape; - loopEscape &= trap.loopEscape; - allEscape &= trap.allEscape; - anyContinue |= trap.anyContinue; - anyBreak |= trap.anyBreak; + methodEscape &= catc.methodEscape; + loopEscape &= catc.loopEscape; + allEscape &= catc.allEscape; + anyContinue |= catc.anyContinue; + anyBreak |= catc.anyBreak; - statementCount = Math.max(statementCount, trap.statementCount); + statementCount = Math.max(statementCount, catc.statementCount); } this.statementCount = block.statementCount + statementCount; } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - writeDebugInfo(adapter); - final Label begin = new Label(); - final Label end = new Label(); - final Label exception = new Label(); + void write(MethodWriter writer) { + writeDebugInfo(writer); - adapter.mark(begin); + Label begin = new Label(); + Label end = new Label(); + Label exception = new Label(); + + writer.mark(begin); block.continu = continu; block.brake = brake; - block.write(settings, definition, adapter); + block.write(writer); if (!block.allEscape) { - adapter.goTo(exception); + writer.goTo(exception); } - adapter.mark(end); + writer.mark(end); - for (final STrap trap : traps) { - trap.begin = begin; - trap.end = end; - trap.exception = traps.size() > 1 ? exception : null; - trap.write(settings, definition, adapter); + for (SCatch catc : catches) { + catc.begin = begin; + catc.end = end; + catc.exception = catches.size() > 1 ? 
exception : null; + catc.write(writer); } - if (!block.allEscape || traps.size() > 1) { - adapter.mark(exception); + if (!block.allEscape || catches.size() > 1) { + writer.mark(exception); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java index f6f8ddc678f..322ae110f66 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Variables; import org.objectweb.asm.Label; @@ -30,23 +29,25 @@ import org.elasticsearch.painless.MethodWriter; */ public final class SWhile extends AStatement { + final int maxLoopCounter; AExpression condition; - final AStatement block; + final SBlock block; - public SWhile(final int line, final String location, final AExpression condition, final AStatement block) { - super(line, location); + public SWhile(int line, int offset, String location, int maxLoopCounter, AExpression condition, SBlock block) { + super(line, offset, location); + this.maxLoopCounter = maxLoopCounter; this.condition = condition; this.block = block; } @Override - void analyze(final CompilerSettings settings, final Definition definition, final Variables variables) { + void analyze(Variables variables) { variables.incrementScope(); - condition.expected = definition.booleanType; - condition.analyze(settings, definition, variables); - condition = condition.cast(settings, definition, variables); + condition.expected = Definition.BOOLEAN_TYPE; + condition.analyze(variables); + condition = condition.cast(variables); boolean continuous = false; @@ -62,16 +63,14 @@ public final class SWhile extends AStatement { } } - int count = 1; - if (block != null) { block.beginLoop = true; block.inLoop = true; - block.analyze(settings, definition, variables); + block.analyze(variables); if (block.loopEscape && !block.anyContinue) { - throw new IllegalArgumentException(error("Extranous while loop.")); + throw new IllegalArgumentException(error("Extraneous while loop.")); } if (continuous && !block.anyBreak) { @@ -79,12 +78,12 @@ public final class SWhile extends AStatement { allEscape = true; } - block.statementCount = Math.max(count, block.statementCount); + block.statementCount = Math.max(1, block.statementCount); } statementCount = 1; - if (settings.getMaxLoopCounter() > 0) { + if (maxLoopCounter > 0) { loopCounterSlot = variables.getVariable(location, "#loop").slot; } @@ -92,30 +91,31 @@ public final class SWhile extends AStatement { } @Override - void write(final CompilerSettings settings, final Definition definition, final MethodWriter adapter) { - writeDebugInfo(adapter); - final Label begin = new Label(); - final Label end = new Label(); + void write(MethodWriter writer) { + writeDebugInfo(writer); - adapter.mark(begin); + Label begin = new Label(); + Label end = new Label(); + + writer.mark(begin); condition.fals = end; - condition.write(settings, definition, adapter); + condition.write(writer); if (block != null) { - adapter.writeLoopCounter(loopCounterSlot, Math.max(1, block.statementCount)); + writer.writeLoopCounter(loopCounterSlot, Math.max(1, block.statementCount)); block.continu = begin; block.brake = end; - block.write(settings, definition, adapter); + 
block.write(writer); } else { - adapter.writeLoopCounter(loopCounterSlot, 1); + writer.writeLoopCounter(loopCounterSlot, 1); } if (block == null || !block.allEscape) { - adapter.goTo(begin); + writer.goTo(begin); } - adapter.mark(end); + writer.mark(end); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java index ab6944619ca..47aa868d6dc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java @@ -22,6 +22,7 @@ *

* The following are the types of nodes: * A* (abstract) - These are the abstract nodes that are the superclasses for the other types. + * I* (interface) - These are marker interfaces to denote a property of the node. * S* (statement) - These are nodes that represent a statement in Painless. These are the highest level nodes. * E* (expression) - These are nodes that represent an expression in Painless. These are the middle level nodes. * L* (link) - These are nodes that represent a piece of a variable/method chain. These are the lowest level nodes. @@ -58,21 +59,23 @@ * {@link org.elasticsearch.painless.node.LNewArray} - Represents an array instantiation. * {@link org.elasticsearch.painless.node.LNewObj} - Represents an object instantiation. * {@link org.elasticsearch.painless.node.LShortcut} - Represents a field load/store shortcut. (Internal only.) + * {@link org.elasticsearch.painless.node.LStatic} - Represents a static type target. * {@link org.elasticsearch.painless.node.LString} - Represents a string constant. * {@link org.elasticsearch.painless.node.LVariable} - Represents a variable load/store. * {@link org.elasticsearch.painless.node.SBlock} - Represents a set of statements as a branch of control-flow. * {@link org.elasticsearch.painless.node.SBreak} - Represents a break statement. + * {@link org.elasticsearch.painless.node.SCatch} - Represents a catch block as part of a try-catch block. * {@link org.elasticsearch.painless.node.SContinue} - Represents a continue statement. * {@link org.elasticsearch.painless.node.SDeclaration} - Represents a single variable declaration. * {@link org.elasticsearch.painless.node.SDeclBlock} - Represents a series of declarations. * {@link org.elasticsearch.painless.node.SDo} - Represents a do-while loop. * {@link org.elasticsearch.painless.node.SExpression} - Represents the top-level node for an expression as a statement. * {@link org.elasticsearch.painless.node.SFor} - Represents a for loop. + * {@link org.elasticsearch.painless.node.SIf} - Represents an if block. * {@link org.elasticsearch.painless.node.SIfElse} - Represents an if/else block. * {@link org.elasticsearch.painless.node.SReturn} - Represents a return statement. * {@link org.elasticsearch.painless.node.SSource} - The root of all Painless trees. Contains a series of statements. * {@link org.elasticsearch.painless.node.SThrow} - Represents a throw statement. - * {@link org.elasticsearch.painless.node.STrap} - Represents a catch block as part of a try-catch block. * {@link org.elasticsearch.painless.node.STry} - Represents the try block as part of a try-catch block. * {@link org.elasticsearch.painless.node.SWhile} - Represents a while loop. *

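Across the hunks above, every node now follows the same two-phase contract: analyze(Variables) validates the subtree and resolves scopes, then write(MethodWriter) emits bytecode, with CompilerSettings and Definition no longer threaded through either call. The self-contained sketch below shows that analyze-then-write shape in miniature; Node, If, Emit, and the string-based writer are invented for this illustration and are not the Painless API:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    // Invented stand-ins for AStatement/Variables/MethodWriter; illustration only.
    public class TwoPhaseSketch {
        interface Node {
            void analyze(List<String> scope);  // phase 1: validate and resolve against a scope
            void write(StringBuilder out);     // phase 2: emit, trusting what analyze proved
        }

        /** Shaped like SIf: a condition guarding a block, validated before any emission. */
        static final class If implements Node {
            final String condition;
            final List<Node> body;

            If(String condition, List<Node> body) {
                this.condition = condition;
                this.body = body;
            }

            @Override public void analyze(List<String> scope) {
                if (body.isEmpty()) {
                    // analogous to SIf.analyze rejecting a missing block
                    throw new IllegalArgumentException("Extraneous if statement.");
                }
                scope.add("#if");                // like variables.incrementScope()
                for (Node node : body) {
                    node.analyze(scope);
                }
                scope.remove(scope.size() - 1);  // like variables.decrementScope()
            }

            @Override public void write(StringBuilder out) {
                out.append("if (").append(condition).append(") { ");
                for (Node node : body) {
                    node.write(out);
                }
                out.append("} ");
            }
        }

        /** A leaf statement that only knows how to print itself. */
        static final class Emit implements Node {
            final String text;

            Emit(String text) {
                this.text = text;
            }

            @Override public void analyze(List<String> scope) {}

            @Override public void write(StringBuilder out) {
                out.append(text).append("; ");
            }
        }

        public static void main(String[] args) {
            Node root = new If("x > 0", Arrays.<Node>asList(new Emit("return x")));
            root.analyze(new ArrayList<>());  // throws on a malformed tree, before any output
            StringBuilder out = new StringBuilder();
            root.write(out);
            System.out.println(out);          // prints: if (x > 0) { return x; }
        }
    }

Because analysis runs to completion first, write never has to re-validate; the real SSource.write leans on the same guarantee when it appends the implicit ACONST_NULL/returnValue epilogue for scripts whose final statement does not escape the method.
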
diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.lang.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.lang.txt new file mode 100644 index 00000000000..755c8d44c0f --- /dev/null +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.lang.txt @@ -0,0 +1,1080 @@ +# +# Licensed to Elasticsearch under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +# +# Painless definition file. This defines the hierarchy of classes, +# what methods and fields they have, etc. +# + +#### Interfaces + +class Appendable -> java.lang.Appendable { + # append(char/CharSequence): skipped. left to subclasses (e.g. StringBuilder). + Appendable append(CharSequence,int,int) +} + +# AutoCloseable: i/o + +class CharSequence -> java.lang.CharSequence { + char charAt(int) + IntStream chars() + IntStream codePoints() + int length() + CharSequence subSequence(int,int) + String toString() +} + +# Cloneable: add clone() to subclasses directly. + +class Comparable -> java.lang.Comparable { + int compareTo(def) +} + +class Iterable -> java.lang.Iterable { + void forEach(Consumer) + Iterator iterator() + Spliterator spliterator() +} + +# Readable: i/o +# Runnable: threads. +# Thread.UncaughtExceptionHandler: threads. 
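+# Note: in these signatures, def is Painless's dynamic type, so members
+# declared with def (e.g. def clone()) accept or return any whitelisted type
+# and are resolved at runtime.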
+ +#### Classes + +class Boolean -> java.lang.Boolean extends Comparable,Object { + Boolean TRUE + Boolean FALSE + boolean booleanValue() + int compare(boolean,boolean) + int hashCode(boolean) + boolean logicalAnd(boolean,boolean) + boolean logicalOr(boolean,boolean) + boolean logicalXor(boolean,boolean) + boolean parseBoolean(String) + String toString(boolean) + Boolean valueOf(boolean) +} + +class Byte -> java.lang.Byte extends Number,Comparable,Object { + int BYTES + byte MAX_VALUE + byte MIN_VALUE + int SIZE + int compare(byte,byte) + Byte decode(String) + int hashCode(byte) + byte parseByte(String) + byte parseByte(String,int) + String toString(byte) + int toUnsignedInt(byte) + long toUnsignedLong(byte) + Byte valueOf(byte) + Byte valueOf(String,int) +} + +class Character -> java.lang.Character extends Comparable,Object { + int BYTES + byte COMBINING_SPACING_MARK + byte CONNECTOR_PUNCTUATION + byte CONTROL + byte CURRENCY_SYMBOL + byte DASH_PUNCTUATION + byte DECIMAL_DIGIT_NUMBER + byte DIRECTIONALITY_ARABIC_NUMBER + byte DIRECTIONALITY_BOUNDARY_NEUTRAL + byte DIRECTIONALITY_COMMON_NUMBER_SEPARATOR + byte DIRECTIONALITY_EUROPEAN_NUMBER + byte DIRECTIONALITY_EUROPEAN_NUMBER_SEPARATOR + byte DIRECTIONALITY_EUROPEAN_NUMBER_TERMINATOR + byte DIRECTIONALITY_LEFT_TO_RIGHT + byte DIRECTIONALITY_LEFT_TO_RIGHT_EMBEDDING + byte DIRECTIONALITY_LEFT_TO_RIGHT_OVERRIDE + byte DIRECTIONALITY_NONSPACING_MARK + byte DIRECTIONALITY_OTHER_NEUTRALS + byte DIRECTIONALITY_PARAGRAPH_SEPARATOR + byte DIRECTIONALITY_POP_DIRECTIONAL_FORMAT + byte DIRECTIONALITY_RIGHT_TO_LEFT + byte DIRECTIONALITY_RIGHT_TO_LEFT_ARABIC + byte DIRECTIONALITY_RIGHT_TO_LEFT_EMBEDDING + byte DIRECTIONALITY_RIGHT_TO_LEFT_OVERRIDE + byte DIRECTIONALITY_SEGMENT_SEPARATOR + byte DIRECTIONALITY_UNDEFINED + byte DIRECTIONALITY_WHITESPACE + byte ENCLOSING_MARK + byte END_PUNCTUATION + byte FINAL_QUOTE_PUNCTUATION + byte FORMAT + byte INITIAL_QUOTE_PUNCTUATION + byte LETTER_NUMBER + byte LINE_SEPARATOR + byte LOWERCASE_LETTER + byte MATH_SYMBOL + int MAX_CODE_POINT + char MAX_HIGH_SURROGATE + char MAX_LOW_SURROGATE + int MAX_RADIX + char MAX_SURROGATE + char MAX_VALUE + char MIN_CODE_POINT + char MIN_HIGH_SURROGATE + char MIN_LOW_SURROGATE + int MIN_RADIX + int MIN_SUPPLEMENTARY_CODE_POINT + char MIN_SURROGATE + char MIN_VALUE + byte MODIFIER_LETTER + byte MODIFIER_SYMBOL + byte NON_SPACING_MARK + byte OTHER_LETTER + byte OTHER_NUMBER + byte OTHER_PUNCTUATION + byte OTHER_SYMBOL + byte PARAGRAPH_SEPARATOR + byte PRIVATE_USE + int SIZE + byte SPACE_SEPARATOR + byte START_PUNCTUATION + byte SURROGATE + byte TITLECASE_LETTER + byte UNASSIGNED + byte UPPERCASE_LETTER + int charCount(int) + char charValue() + int codePointAt(char[],int,int) + int codePointAt(CharSequence,int) + int codePointBefore(char[],int,int) + int codePointBefore(CharSequence,int) + int codePointCount(CharSequence,int,int) + int compare(char,char) + int digit(int,int) + char forDigit(int,int) + byte getDirectionality(int) + String getName(int) + int getNumericValue(int) + int getType(int) + int hashCode(char) + char highSurrogate(int) + boolean isAlphabetic(int) + boolean isBmpCodePoint(int) + boolean isDefined(int) + boolean isDigit(int) + boolean isHighSurrogate(char) + boolean isIdentifierIgnorable(int) + boolean isIdeographic(int) + boolean isISOControl(int) + boolean isJavaIdentifierPart(int) + boolean isJavaIdentifierStart(int) + boolean isLetter(int) + boolean isLetterOrDigit(int) + boolean isLowerCase(int) + boolean isMirrored(int) + boolean isSpaceChar(int) + 
boolean isSupplementaryCodePoint(int) + boolean isSurrogate(char) + boolean isSurrogatePair(char,char) + boolean isTitleCase(int) + boolean isUnicodeIdentifierPart(int) + boolean isUnicodeIdentifierStart(int) + boolean isUpperCase(int) + boolean isValidCodePoint(int) + boolean isWhitespace(int) + char lowSurrogate(int) + int offsetByCodePoints(char[],int,int,int,int) + int offsetByCodePoints(CharSequence,int,int) + char reverseBytes(char) + char[] toChars(int) + int toChars(int,char[],int) + int toCodePoint(char,char) + char toLowerCase(char) + String toString(char) + char toTitleCase(char) + char toUpperCase(char) + Character valueOf(char) +} + +class Character.Subset -> java.lang.Character$Subset extends Object { +} + +class Character.UnicodeBlock -> java.lang.Character$UnicodeBlock extends Character.Subset,Object { + Character.UnicodeBlock AEGEAN_NUMBERS + Character.UnicodeBlock ALCHEMICAL_SYMBOLS + Character.UnicodeBlock ALPHABETIC_PRESENTATION_FORMS + Character.UnicodeBlock ANCIENT_GREEK_MUSICAL_NOTATION + Character.UnicodeBlock ANCIENT_GREEK_NUMBERS + Character.UnicodeBlock ANCIENT_SYMBOLS + Character.UnicodeBlock ARABIC + Character.UnicodeBlock ARABIC_EXTENDED_A + Character.UnicodeBlock ARABIC_MATHEMATICAL_ALPHABETIC_SYMBOLS + Character.UnicodeBlock ARABIC_PRESENTATION_FORMS_A + Character.UnicodeBlock ARABIC_PRESENTATION_FORMS_B + Character.UnicodeBlock ARABIC_SUPPLEMENT + Character.UnicodeBlock ARMENIAN + Character.UnicodeBlock ARROWS + Character.UnicodeBlock AVESTAN + Character.UnicodeBlock BALINESE + Character.UnicodeBlock BAMUM + Character.UnicodeBlock BAMUM_SUPPLEMENT + Character.UnicodeBlock BASIC_LATIN + Character.UnicodeBlock BATAK + Character.UnicodeBlock BENGALI + Character.UnicodeBlock BLOCK_ELEMENTS + Character.UnicodeBlock BOPOMOFO + Character.UnicodeBlock BOPOMOFO_EXTENDED + Character.UnicodeBlock BOX_DRAWING + Character.UnicodeBlock BRAHMI + Character.UnicodeBlock BRAILLE_PATTERNS + Character.UnicodeBlock BUGINESE + Character.UnicodeBlock BUHID + Character.UnicodeBlock BYZANTINE_MUSICAL_SYMBOLS + Character.UnicodeBlock CARIAN + Character.UnicodeBlock CHAKMA + Character.UnicodeBlock CHAM + Character.UnicodeBlock CHEROKEE + Character.UnicodeBlock CJK_COMPATIBILITY + Character.UnicodeBlock CJK_COMPATIBILITY_FORMS + Character.UnicodeBlock CJK_COMPATIBILITY_IDEOGRAPHS + Character.UnicodeBlock CJK_COMPATIBILITY_IDEOGRAPHS_SUPPLEMENT + Character.UnicodeBlock CJK_RADICALS_SUPPLEMENT + Character.UnicodeBlock CJK_STROKES + Character.UnicodeBlock CJK_SYMBOLS_AND_PUNCTUATION + Character.UnicodeBlock CJK_UNIFIED_IDEOGRAPHS + Character.UnicodeBlock CJK_UNIFIED_IDEOGRAPHS_EXTENSION_A + Character.UnicodeBlock CJK_UNIFIED_IDEOGRAPHS_EXTENSION_B + Character.UnicodeBlock CJK_UNIFIED_IDEOGRAPHS_EXTENSION_C + Character.UnicodeBlock CJK_UNIFIED_IDEOGRAPHS_EXTENSION_D + Character.UnicodeBlock COMBINING_DIACRITICAL_MARKS + Character.UnicodeBlock COMBINING_DIACRITICAL_MARKS_SUPPLEMENT + Character.UnicodeBlock COMBINING_HALF_MARKS + Character.UnicodeBlock COMBINING_MARKS_FOR_SYMBOLS + Character.UnicodeBlock COMMON_INDIC_NUMBER_FORMS + Character.UnicodeBlock CONTROL_PICTURES + Character.UnicodeBlock COPTIC + Character.UnicodeBlock COUNTING_ROD_NUMERALS + Character.UnicodeBlock CUNEIFORM + Character.UnicodeBlock CUNEIFORM_NUMBERS_AND_PUNCTUATION + Character.UnicodeBlock CURRENCY_SYMBOLS + Character.UnicodeBlock CYPRIOT_SYLLABARY + Character.UnicodeBlock CYRILLIC + Character.UnicodeBlock CYRILLIC_EXTENDED_A + Character.UnicodeBlock CYRILLIC_EXTENDED_B + Character.UnicodeBlock 
CYRILLIC_SUPPLEMENTARY + Character.UnicodeBlock DESERET + Character.UnicodeBlock DEVANAGARI + Character.UnicodeBlock DEVANAGARI_EXTENDED + Character.UnicodeBlock DINGBATS + Character.UnicodeBlock DOMINO_TILES + Character.UnicodeBlock EGYPTIAN_HIEROGLYPHS + Character.UnicodeBlock EMOTICONS + Character.UnicodeBlock ENCLOSED_ALPHANUMERIC_SUPPLEMENT + Character.UnicodeBlock ENCLOSED_ALPHANUMERICS + Character.UnicodeBlock ENCLOSED_CJK_LETTERS_AND_MONTHS + Character.UnicodeBlock ENCLOSED_IDEOGRAPHIC_SUPPLEMENT + Character.UnicodeBlock ETHIOPIC + Character.UnicodeBlock ETHIOPIC_EXTENDED + Character.UnicodeBlock ETHIOPIC_EXTENDED_A + Character.UnicodeBlock ETHIOPIC_SUPPLEMENT + Character.UnicodeBlock GENERAL_PUNCTUATION + Character.UnicodeBlock GEOMETRIC_SHAPES + Character.UnicodeBlock GEORGIAN + Character.UnicodeBlock GEORGIAN_SUPPLEMENT + Character.UnicodeBlock GLAGOLITIC + Character.UnicodeBlock GOTHIC + Character.UnicodeBlock GREEK + Character.UnicodeBlock GREEK_EXTENDED + Character.UnicodeBlock GUJARATI + Character.UnicodeBlock GURMUKHI + Character.UnicodeBlock HALFWIDTH_AND_FULLWIDTH_FORMS + Character.UnicodeBlock HANGUL_COMPATIBILITY_JAMO + Character.UnicodeBlock HANGUL_JAMO + Character.UnicodeBlock HANGUL_JAMO_EXTENDED_A + Character.UnicodeBlock HANGUL_JAMO_EXTENDED_B + Character.UnicodeBlock HANGUL_SYLLABLES + Character.UnicodeBlock HANUNOO + Character.UnicodeBlock HEBREW + Character.UnicodeBlock HIGH_PRIVATE_USE_SURROGATES + Character.UnicodeBlock HIGH_SURROGATES + Character.UnicodeBlock HIRAGANA + Character.UnicodeBlock IDEOGRAPHIC_DESCRIPTION_CHARACTERS + Character.UnicodeBlock IMPERIAL_ARAMAIC + Character.UnicodeBlock INSCRIPTIONAL_PAHLAVI + Character.UnicodeBlock INSCRIPTIONAL_PARTHIAN + Character.UnicodeBlock IPA_EXTENSIONS + Character.UnicodeBlock JAVANESE + Character.UnicodeBlock KAITHI + Character.UnicodeBlock KANA_SUPPLEMENT + Character.UnicodeBlock KANBUN + Character.UnicodeBlock KANGXI_RADICALS + Character.UnicodeBlock KANNADA + Character.UnicodeBlock KATAKANA + Character.UnicodeBlock KATAKANA_PHONETIC_EXTENSIONS + Character.UnicodeBlock KAYAH_LI + Character.UnicodeBlock KHAROSHTHI + Character.UnicodeBlock KHMER + Character.UnicodeBlock KHMER_SYMBOLS + Character.UnicodeBlock LAO + Character.UnicodeBlock LATIN_1_SUPPLEMENT + Character.UnicodeBlock LATIN_EXTENDED_A + Character.UnicodeBlock LATIN_EXTENDED_ADDITIONAL + Character.UnicodeBlock LATIN_EXTENDED_B + Character.UnicodeBlock LATIN_EXTENDED_C + Character.UnicodeBlock LATIN_EXTENDED_D + Character.UnicodeBlock LEPCHA + Character.UnicodeBlock LETTERLIKE_SYMBOLS + Character.UnicodeBlock LIMBU + Character.UnicodeBlock LINEAR_B_IDEOGRAMS + Character.UnicodeBlock LINEAR_B_SYLLABARY + Character.UnicodeBlock LISU + Character.UnicodeBlock LOW_SURROGATES + Character.UnicodeBlock LYCIAN + Character.UnicodeBlock LYDIAN + Character.UnicodeBlock MAHJONG_TILES + Character.UnicodeBlock MALAYALAM + Character.UnicodeBlock MANDAIC + Character.UnicodeBlock MATHEMATICAL_ALPHANUMERIC_SYMBOLS + Character.UnicodeBlock MATHEMATICAL_OPERATORS + Character.UnicodeBlock MEETEI_MAYEK + Character.UnicodeBlock MEETEI_MAYEK_EXTENSIONS + Character.UnicodeBlock MEROITIC_CURSIVE + Character.UnicodeBlock MEROITIC_HIEROGLYPHS + Character.UnicodeBlock MIAO + Character.UnicodeBlock MISCELLANEOUS_MATHEMATICAL_SYMBOLS_A + Character.UnicodeBlock MISCELLANEOUS_MATHEMATICAL_SYMBOLS_B + Character.UnicodeBlock MISCELLANEOUS_SYMBOLS + Character.UnicodeBlock MISCELLANEOUS_SYMBOLS_AND_ARROWS + Character.UnicodeBlock MISCELLANEOUS_SYMBOLS_AND_PICTOGRAPHS + 
Character.UnicodeBlock MISCELLANEOUS_TECHNICAL + Character.UnicodeBlock MODIFIER_TONE_LETTERS + Character.UnicodeBlock MONGOLIAN + Character.UnicodeBlock MUSICAL_SYMBOLS + Character.UnicodeBlock MYANMAR + Character.UnicodeBlock MYANMAR_EXTENDED_A + Character.UnicodeBlock NEW_TAI_LUE + Character.UnicodeBlock NKO + Character.UnicodeBlock NUMBER_FORMS + Character.UnicodeBlock OGHAM + Character.UnicodeBlock OL_CHIKI + Character.UnicodeBlock OLD_ITALIC + Character.UnicodeBlock OLD_PERSIAN + Character.UnicodeBlock OLD_SOUTH_ARABIAN + Character.UnicodeBlock OLD_TURKIC + Character.UnicodeBlock OPTICAL_CHARACTER_RECOGNITION + Character.UnicodeBlock ORIYA + Character.UnicodeBlock OSMANYA + Character.UnicodeBlock PHAGS_PA + Character.UnicodeBlock PHAISTOS_DISC + Character.UnicodeBlock PHOENICIAN + Character.UnicodeBlock PHONETIC_EXTENSIONS + Character.UnicodeBlock PHONETIC_EXTENSIONS_SUPPLEMENT + Character.UnicodeBlock PLAYING_CARDS + Character.UnicodeBlock PRIVATE_USE_AREA + Character.UnicodeBlock REJANG + Character.UnicodeBlock RUMI_NUMERAL_SYMBOLS + Character.UnicodeBlock RUNIC + Character.UnicodeBlock SAMARITAN + Character.UnicodeBlock SAURASHTRA + Character.UnicodeBlock SHARADA + Character.UnicodeBlock SHAVIAN + Character.UnicodeBlock SINHALA + Character.UnicodeBlock SMALL_FORM_VARIANTS + Character.UnicodeBlock SORA_SOMPENG + Character.UnicodeBlock SPACING_MODIFIER_LETTERS + Character.UnicodeBlock SPECIALS + Character.UnicodeBlock SUNDANESE + Character.UnicodeBlock SUNDANESE_SUPPLEMENT + Character.UnicodeBlock SUPERSCRIPTS_AND_SUBSCRIPTS + Character.UnicodeBlock SUPPLEMENTAL_ARROWS_A + Character.UnicodeBlock SUPPLEMENTAL_ARROWS_B + Character.UnicodeBlock SUPPLEMENTAL_MATHEMATICAL_OPERATORS + Character.UnicodeBlock SUPPLEMENTAL_PUNCTUATION + Character.UnicodeBlock SUPPLEMENTARY_PRIVATE_USE_AREA_A + Character.UnicodeBlock SUPPLEMENTARY_PRIVATE_USE_AREA_B + Character.UnicodeBlock SYLOTI_NAGRI + Character.UnicodeBlock SYRIAC + Character.UnicodeBlock TAGALOG + Character.UnicodeBlock TAGBANWA + Character.UnicodeBlock TAGS + Character.UnicodeBlock TAI_LE + Character.UnicodeBlock TAI_THAM + Character.UnicodeBlock TAI_VIET + Character.UnicodeBlock TAI_XUAN_JING_SYMBOLS + Character.UnicodeBlock TAKRI + Character.UnicodeBlock TAMIL + Character.UnicodeBlock TELUGU + Character.UnicodeBlock THAANA + Character.UnicodeBlock THAI + Character.UnicodeBlock TIBETAN + Character.UnicodeBlock TIFINAGH + Character.UnicodeBlock TRANSPORT_AND_MAP_SYMBOLS + Character.UnicodeBlock UGARITIC + Character.UnicodeBlock UNIFIED_CANADIAN_ABORIGINAL_SYLLABICS + Character.UnicodeBlock UNIFIED_CANADIAN_ABORIGINAL_SYLLABICS_EXTENDED + Character.UnicodeBlock VAI + Character.UnicodeBlock VARIATION_SELECTORS + Character.UnicodeBlock VARIATION_SELECTORS_SUPPLEMENT + Character.UnicodeBlock VEDIC_EXTENSIONS + Character.UnicodeBlock VERTICAL_FORMS + Character.UnicodeBlock YI_RADICALS + Character.UnicodeBlock YI_SYLLABLES + Character.UnicodeBlock YIJING_HEXAGRAM_SYMBOLS + Character.UnicodeBlock forName(String) + Character.UnicodeBlock of(int) +} + +# Class: skipped for obvious reasons +# ClassLoader: ... +# ClassValue: ... +# Compiler: ... 
+ +class Double -> java.lang.Double extends Number,Comparable,Object { + int BYTES + int MAX_EXPONENT + double MAX_VALUE + int MIN_EXPONENT + double MIN_NORMAL + double MIN_VALUE + double NaN + double NEGATIVE_INFINITY + double POSITIVE_INFINITY + int SIZE + int compare(double,double) + long doubleToLongBits(double) + long doubleToRawLongBits(double) + int hashCode(double) + boolean isFinite(double) + boolean isInfinite() + boolean isInfinite(double) + boolean isNaN() + boolean isNaN(double) + double longBitsToDouble(long) + double max(double,double) + double min(double,double) + double parseDouble(String) + double sum(double,double) + String toHexString(double) + String toString(double) + Double valueOf(double) +} + +class Enum -> java.lang.Enum extends Comparable,Object { + String name() + int ordinal() +} + +class Float -> java.lang.Float extends Number,Comparable,Object { + int BYTES + int MAX_EXPONENT + float MAX_VALUE + int MIN_EXPONENT + float MIN_NORMAL + float MIN_VALUE + float NaN + float NEGATIVE_INFINITY + float POSITIVE_INFINITY + int SIZE + int compare(float,float) + int floatToIntBits(float) + int floatToRawIntBits(float) + int hashCode(float) + float intBitsToFloat(int) + boolean isFinite(float) + boolean isInfinite() + boolean isInfinite(float) + boolean isNaN() + boolean isNaN(float) + float max(float,float) + float min(float,float) + float parseFloat(String) + float sum(float,float) + String toHexString(float) + String toString(float) + Float valueOf(float) +} + +# InheritableThreadLocal: threads + +class Integer -> java.lang.Integer extends Number,Comparable,Object { + int BYTES + int MAX_VALUE + int MIN_VALUE + int SIZE + int bitCount(int) + int compare(int,int) + int compareUnsigned(int,int) + Integer decode(String) + int divideUnsigned(int,int) + int hashCode(int) + int highestOneBit(int) + int lowestOneBit(int) + int max(int,int) + int min(int,int) + int numberOfLeadingZeros(int) + int numberOfTrailingZeros(int) + int parseInt(String) + int parseInt(String,int) + int parseUnsignedInt(String) + int parseUnsignedInt(String,int) + int remainderUnsigned(int,int) + int reverse(int) + int reverseBytes(int) + int rotateLeft(int,int) + int rotateRight(int,int) + int signum(int) + String toBinaryString(int) + String toHexString(int) + String toOctalString(int) + String toString(int) + String toString(int,int) + long toUnsignedLong(int) + String toUnsignedString(int) + String toUnsignedString(int,int) + Integer valueOf(int) + Integer valueOf(String,int) +} + +class Long -> java.lang.Long extends Number,Comparable,Object { + int BYTES + long MAX_VALUE + long MIN_VALUE + int SIZE + int bitCount(long) + int compare(long,long) + int compareUnsigned(long,long) + Long decode(String) + long divideUnsigned(long,long) + int hashCode(long) + long highestOneBit(long) + long lowestOneBit(long) + long max(long,long) + long min(long,long) + int numberOfLeadingZeros(long) + int numberOfTrailingZeros(long) + long parseLong(String) + long parseLong(String,int) + long parseUnsignedLong(String) + long parseUnsignedLong(String,int) + long remainderUnsigned(long,long) + long reverse(long) + long reverseBytes(long) + long rotateLeft(long,int) + long rotateRight(long,int) + int signum(long) + long sum(long,long) + String toBinaryString(long) + String toHexString(long) + String toOctalString(long) + String toString(long) + String toString(long,int) + String toUnsignedString(long) + String toUnsignedString(long,int) + Long valueOf(long) + Long valueOf(String,int) +} + +class Math -> java.lang.Math
{ + double E + double PI + double abs(double) + double acos(double) + double asin(double) + double atan(double) + double atan2(double,double) + double cbrt(double) + double ceil(double) + double copySign(double,double) + double cos(double) + double cosh(double) + double exp(double) + double expm1(double) + double floor(double) + double hypot(double,double) + double IEEEremainder(double,double) + double log(double) + double log10(double) + double log1p(double) + double max(double,double) + double min(double,double) + double nextAfter(double,double) + double nextDown(double) + double nextUp(double) + double pow(double,double) + double random() + double rint(double) + long round(double) + double scalb(double,int) + double signum(double) + double sin(double) + double sinh(double) + double sqrt(double) + double tan(double) + double tanh(double) + double toDegrees(double) + double toRadians(double) + double ulp(double) +} + +class Number -> java.lang.Number extends Object { + byte byteValue() + short shortValue() + int intValue() + long longValue() + float floatValue() + double doubleValue() +} + +class Object -> java.lang.Object { + boolean equals(Object) + int hashCode() + String toString() +} + +# Package: skipped + # Process: skipped + # ProcessBuilder: skipped + # ProcessBuilder.Redirect: skipped + # Runtime: skipped + # RuntimePermission: skipped + # SecurityManager: skipped + +class Short -> java.lang.Short extends Number,Comparable,Object { + int BYTES + short MAX_VALUE + short MIN_VALUE + int SIZE + int compare(short,short) + Short decode(String) + int hashCode(short) + short parseShort(String) + short parseShort(String,int) + short reverseBytes(short) + String toString(short) + int toUnsignedInt(short) + long toUnsignedLong(short) + Short valueOf(short) + Short valueOf(String,int) +} + +class StackTraceElement -> java.lang.StackTraceElement extends Object { + StackTraceElement (String,String,String,int) + String getClassName() + String getFileName() + int getLineNumber() + String getMethodName() + boolean isNativeMethod() +} + +class StrictMath -> java.lang.StrictMath { + double E + double PI + double abs(double) + double acos(double) + double asin(double) + double atan(double) + double atan2(double,double) + double cbrt(double) + double ceil(double) + double copySign(double,double) + double cos(double) + double cosh(double) + double exp(double) + double expm1(double) + double floor(double) + double hypot(double,double) + double IEEEremainder(double,double) + double log(double) + double log10(double) + double log1p(double) + double max(double,double) + double min(double,double) + double nextAfter(double,double) + double nextDown(double) + double nextUp(double) + double pow(double,double) + double random() + double rint(double) + long round(double) + double scalb(double,int) + double signum(double) + double sin(double) + double sinh(double) + double sqrt(double) + double tan(double) + double tanh(double) + double toDegrees(double) + double toRadians(double) + double ulp(double) +} + +class String -> java.lang.String extends CharSequence,Comparable,Object { + String () + int codePointAt(int) + int codePointBefore(int) + int codePointCount(int,int) + int compareToIgnoreCase(String) + String concat(String) + boolean contains(CharSequence) + boolean contentEquals(CharSequence) + String copyValueOf(char[]) + String copyValueOf(char[],int,int) + boolean endsWith(String) + boolean equalsIgnoreCase(String) + String format(Locale,String,def[]) + String format(String,def[]) + void
getChars(int,int,char[],int) + int indexOf(String) + int indexOf(String,int) + boolean isEmpty() + String join(CharSequence,Iterable) + int lastIndexOf(String) + int lastIndexOf(String,int) + int offsetByCodePoints(int,int) + boolean regionMatches(boolean,int,String,int,int) + boolean regionMatches(int,String,int,int) + String replace(CharSequence,CharSequence) + boolean startsWith(String) + boolean startsWith(String,int) + String substring(int) + String substring(int,int) + char[] toCharArray() + String toLowerCase() + String toLowerCase(Locale) + String toUpperCase() + String toUpperCase(Locale) + String trim() + String valueOf(def) +} + +class StringBuffer -> java.lang.StringBuffer extends CharSequence,Appendable,Object { + StringBuffer () + StringBuffer (CharSequence) + StringBuffer append(def) + StringBuffer append(CharSequence,int,int) + StringBuffer appendCodePoint(int) + int capacity() + int codePointAt(int) + int codePointBefore(int) + int codePointCount(int,int) + StringBuffer delete(int,int) + StringBuffer deleteCharAt(int) + void getChars(int,int,char[],int) + int indexOf(String) + int indexOf(String,int) + StringBuffer insert(int,def) + int lastIndexOf(String) + int lastIndexOf(String,int) + int offsetByCodePoints(int,int) + StringBuffer replace(int,int,String) + StringBuffer reverse() + void setCharAt(int,char) + void setLength(int) + String substring(int) + String substring(int,int) +} + +class StringBuilder -> java.lang.StringBuilder extends CharSequence,Appendable,Object { + StringBuilder () + StringBuilder (CharSequence) + StringBuilder append(def) + StringBuilder append(CharSequence,int,int) + StringBuilder appendCodePoint(int) + int capacity() + int codePointAt(int) + int codePointBefore(int) + int codePointCount(int,int) + StringBuilder delete(int,int) + StringBuilder deleteCharAt(int) + void getChars(int,int,char[],int) + int indexOf(String) + int indexOf(String,int) + StringBuilder insert(int,def) + int lastIndexOf(String) + int lastIndexOf(String,int) + int offsetByCodePoints(int,int) + StringBuilder replace(int,int,String) + StringBuilder reverse() + void setCharAt(int,char) + void setLength(int) + String substring(int) + String substring(int,int) +} + +class System -> java.lang.System extends Object { + void arraycopy(Object,int,Object,int,int) + long currentTimeMillis() + long nanoTime() +} + +# Thread: skipped +# ThreadGroup: skipped +# ThreadLocal: skipped +# Throwable: skipped (reserved for painless, users can only catch Exceptions) + +class Void -> java.lang.Void extends Object { +} + +#### Enums + +class Character.UnicodeScript -> java.lang.Character$UnicodeScript extends Enum,Object { + Character.UnicodeScript ARABIC + Character.UnicodeScript ARMENIAN + Character.UnicodeScript AVESTAN + Character.UnicodeScript BALINESE + Character.UnicodeScript BAMUM + Character.UnicodeScript BATAK + Character.UnicodeScript BENGALI + Character.UnicodeScript BOPOMOFO + Character.UnicodeScript BRAHMI + Character.UnicodeScript BRAILLE + Character.UnicodeScript BUGINESE + Character.UnicodeScript BUHID + Character.UnicodeScript CANADIAN_ABORIGINAL + Character.UnicodeScript CARIAN + Character.UnicodeScript CHAKMA + Character.UnicodeScript CHAM + Character.UnicodeScript CHEROKEE + Character.UnicodeScript COMMON + Character.UnicodeScript COPTIC + Character.UnicodeScript CUNEIFORM + Character.UnicodeScript CYPRIOT + Character.UnicodeScript CYRILLIC + Character.UnicodeScript DESERET + Character.UnicodeScript DEVANAGARI + Character.UnicodeScript EGYPTIAN_HIEROGLYPHS + 
Character.UnicodeScript ETHIOPIC + Character.UnicodeScript GEORGIAN + Character.UnicodeScript GLAGOLITIC + Character.UnicodeScript GOTHIC + Character.UnicodeScript GREEK + Character.UnicodeScript GUJARATI + Character.UnicodeScript GURMUKHI + Character.UnicodeScript HAN + Character.UnicodeScript HANGUL + Character.UnicodeScript HANUNOO + Character.UnicodeScript HEBREW + Character.UnicodeScript HIRAGANA + Character.UnicodeScript IMPERIAL_ARAMAIC + Character.UnicodeScript INHERITED + Character.UnicodeScript INSCRIPTIONAL_PAHLAVI + Character.UnicodeScript INSCRIPTIONAL_PARTHIAN + Character.UnicodeScript JAVANESE + Character.UnicodeScript KAITHI + Character.UnicodeScript KANNADA + Character.UnicodeScript KATAKANA + Character.UnicodeScript KAYAH_LI + Character.UnicodeScript KHAROSHTHI + Character.UnicodeScript KHMER + Character.UnicodeScript LAO + Character.UnicodeScript LATIN + Character.UnicodeScript LEPCHA + Character.UnicodeScript LIMBU + Character.UnicodeScript LINEAR_B + Character.UnicodeScript LISU + Character.UnicodeScript LYCIAN + Character.UnicodeScript LYDIAN + Character.UnicodeScript MALAYALAM + Character.UnicodeScript MANDAIC + Character.UnicodeScript MEETEI_MAYEK + Character.UnicodeScript MEROITIC_CURSIVE + Character.UnicodeScript MEROITIC_HIEROGLYPHS + Character.UnicodeScript MIAO + Character.UnicodeScript MONGOLIAN + Character.UnicodeScript MYANMAR + Character.UnicodeScript NEW_TAI_LUE + Character.UnicodeScript NKO + Character.UnicodeScript OGHAM + Character.UnicodeScript OL_CHIKI + Character.UnicodeScript OLD_ITALIC + Character.UnicodeScript OLD_PERSIAN + Character.UnicodeScript OLD_SOUTH_ARABIAN + Character.UnicodeScript OLD_TURKIC + Character.UnicodeScript ORIYA + Character.UnicodeScript OSMANYA + Character.UnicodeScript PHAGS_PA + Character.UnicodeScript PHOENICIAN + Character.UnicodeScript REJANG + Character.UnicodeScript RUNIC + Character.UnicodeScript SAMARITAN + Character.UnicodeScript SAURASHTRA + Character.UnicodeScript SHARADA + Character.UnicodeScript SHAVIAN + Character.UnicodeScript SINHALA + Character.UnicodeScript SORA_SOMPENG + Character.UnicodeScript SUNDANESE + Character.UnicodeScript SYLOTI_NAGRI + Character.UnicodeScript SYRIAC + Character.UnicodeScript TAGALOG + Character.UnicodeScript TAGBANWA + Character.UnicodeScript TAI_LE + Character.UnicodeScript TAI_THAM + Character.UnicodeScript TAI_VIET + Character.UnicodeScript TAKRI + Character.UnicodeScript TAMIL + Character.UnicodeScript TELUGU + Character.UnicodeScript THAANA + Character.UnicodeScript THAI + Character.UnicodeScript TIBETAN + Character.UnicodeScript TIFINAGH + Character.UnicodeScript UGARITIC + Character.UnicodeScript UNKNOWN + Character.UnicodeScript VAI + Character.UnicodeScript YI + Character.UnicodeScript forName(String) + Character.UnicodeScript of(int) + Character.UnicodeScript valueOf(String) + Character.UnicodeScript[] values() +} + +#### Exceptions + +class ArithmeticException -> java.lang.ArithmeticException extends RuntimeException,Exception,Object { + ArithmeticException () + ArithmeticException (String) +} + +class ArrayIndexOutOfBoundsException -> java.lang.ArrayIndexOutOfBoundsException extends IndexOutOfBoundsException,RuntimeException,Exception,Object { + ArrayIndexOutOfBoundsException () + ArrayIndexOutOfBoundsException (String) +} + +class ArrayStoreException -> java.lang.ArrayStoreException extends RuntimeException,Exception,Object { + ArrayStoreException () + ArrayStoreException (String) +} + +class ClassCastException -> java.lang.ClassCastException extends 
RuntimeException,Exception,Object { + ClassCastException () + ClassCastException (String) +} + +class ClassNotFoundException -> java.lang.ClassNotFoundException extends ReflectiveOperationException,Exception,Object { + ClassNotFoundException () + ClassNotFoundException (String) +} + +class CloneNotSupportedException -> java.lang.CloneNotSupportedException extends Exception,Object { + CloneNotSupportedException () + CloneNotSupportedException (String) +} + +class EnumConstantNotPresentException -> java.lang.EnumConstantNotPresentException extends RuntimeException,Exception,Object { + String constantName() +} + +class Exception -> java.lang.Exception extends Object { + Exception () + Exception (String) + String getLocalizedMessage() + String getMessage() + StackTraceElement[] getStackTrace() +} + +class IllegalAccessException -> java.lang.IllegalAccessException extends ReflectiveOperationException,Exception,Object { + IllegalAccessException () + IllegalAccessException (String) +} + +class IllegalArgumentException -> java.lang.IllegalArgumentException extends RuntimeException,Exception,Object { + IllegalArgumentException () + IllegalArgumentException (String) +} + +class IllegalMonitorStateException -> java.lang.IllegalMonitorStateException extends RuntimeException,Exception,Object { + IllegalMonitorStateException () + IllegalMonitorStateException (String) +} + +class IllegalStateException -> java.lang.IllegalStateException extends RuntimeException,Exception,Object { + IllegalStateException () + IllegalStateException (String) +} + +class IllegalThreadStateException -> java.lang.IllegalThreadStateException extends IllegalArgumentException,RuntimeException,Exception,Object { + IllegalThreadStateException () + IllegalThreadStateException (String) +} + +class IndexOutOfBoundsException -> java.lang.IndexOutOfBoundsException extends RuntimeException,Exception,Object { + IndexOutOfBoundsException () + IndexOutOfBoundsException (String) +} + +class InstantiationException -> java.lang.InstantiationException extends ReflectiveOperationException,Exception,Object { + InstantiationException () + InstantiationException (String) +} + +class InterruptedException -> java.lang.InterruptedException extends Exception,Object { + InterruptedException () + InterruptedException (String) +} + +class NegativeArraySizeException -> java.lang.NegativeArraySizeException extends RuntimeException,Exception,Object { + NegativeArraySizeException () + NegativeArraySizeException (String) +} + +class NoSuchFieldException -> java.lang.NoSuchFieldException extends ReflectiveOperationException,Exception,Object { + NoSuchFieldException () + NoSuchFieldException (String) +} + +class NoSuchMethodException -> java.lang.NoSuchMethodException extends ReflectiveOperationException,Exception,Object { + NoSuchMethodException () + NoSuchMethodException (String) +} + +class NullPointerException -> java.lang.NullPointerException extends RuntimeException,Exception,Object { + NullPointerException () + NullPointerException (String) +} + +class NumberFormatException -> java.lang.NumberFormatException extends RuntimeException,Exception,Object { + NumberFormatException () + NumberFormatException (String) +} + +class ReflectiveOperationException -> java.lang.ReflectiveOperationException extends Exception,Object { + ReflectiveOperationException () + ReflectiveOperationException (String) +} + +class RuntimeException -> java.lang.RuntimeException extends Exception,Object { + RuntimeException () + RuntimeException (String) +} + +class SecurityException -> 
java.lang.SecurityException extends RuntimeException,Exception,Object { + SecurityException () + SecurityException (String) +} + +class StringIndexOutOfBoundsException -> java.lang.StringIndexOutOfBoundsException extends IndexOutOfBoundsException,RuntimeException,Exception,Object { + StringIndexOutOfBoundsException () + StringIndexOutOfBoundsException (String) +} + +class TypeNotPresentException -> java.lang.TypeNotPresentException extends RuntimeException,Exception,Object { + String typeName() +} + +class UnsupportedOperationException -> java.lang.UnsupportedOperationException extends RuntimeException,Exception,Object { + UnsupportedOperationException () + UnsupportedOperationException (String) +} + diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.math.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.math.txt new file mode 100644 index 00000000000..42680f8a428 --- /dev/null +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.math.txt @@ -0,0 +1,149 @@ +# +# Licensed to Elasticsearch under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +# +# Painless definition file. This defines the hierarchy of classes, +# what methods and fields they have, etc. 
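+# For example, a hypothetical entry of the form
+#
+#   class Foo -> java.some.Foo extends Bar,Object {
+#     int SOME_CONSTANT
+#     Foo (String)
+#     int size()
+#   }
+#
+# maps the Painless name Foo to java.some.Foo and whitelists exactly the
+# listed field, constructor, and method (Foo, Bar, and their members are
+# placeholders, not real entries).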
+# + +#### Classes + +class BigDecimal -> java.math.BigDecimal extends Number,Comparable,Object { + BigDecimal ONE + BigDecimal TEN + BigDecimal ZERO + BigDecimal (String) + BigDecimal (String,MathContext) + BigDecimal abs() + BigDecimal abs(MathContext) + BigDecimal add(BigDecimal) + BigDecimal add(BigDecimal,MathContext) + byte byteValueExact() + BigDecimal divide(BigDecimal) + BigDecimal divide(BigDecimal,MathContext) + BigDecimal[] divideAndRemainder(BigDecimal) + BigDecimal[] divideAndRemainder(BigDecimal,MathContext) + BigDecimal divideToIntegralValue(BigDecimal) + BigDecimal divideToIntegralValue(BigDecimal,MathContext) + int intValueExact() + long longValueExact() + BigDecimal max(BigDecimal) + BigDecimal min(BigDecimal) + BigDecimal movePointLeft(int) + BigDecimal movePointRight(int) + BigDecimal multiply(BigDecimal) + BigDecimal multiply(BigDecimal,MathContext) + BigDecimal negate() + BigDecimal negate(MathContext) + BigDecimal plus() + BigDecimal plus(MathContext) + BigDecimal pow(int) + BigDecimal pow(int,MathContext) + int precision() + BigDecimal remainder(BigDecimal) + BigDecimal remainder(BigDecimal,MathContext) + BigDecimal round(MathContext) + int scale() + BigDecimal scaleByPowerOfTen(int) + BigDecimal setScale(int) + BigDecimal setScale(int,RoundingMode) + short shortValueExact() + int signum() + BigDecimal stripTrailingZeros() + BigDecimal subtract(BigDecimal) + BigDecimal subtract(BigDecimal,MathContext) + BigInteger toBigInteger() + BigInteger toBigIntegerExact() + String toEngineeringString() + String toPlainString() + BigDecimal ulp() + BigDecimal valueOf(double) +} + +class BigInteger -> java.math.BigInteger extends Number,Comparable,Object { + BigInteger ONE + BigInteger TEN + BigInteger ZERO + BigInteger (String) + BigInteger (String,int) + BigInteger abs() + BigInteger add(BigInteger) + BigInteger and(BigInteger) + BigInteger andNot(BigInteger) + int bitCount() + int bitLength() + byte byteValueExact() + BigInteger clearBit(int) + BigInteger divide(BigInteger) + BigInteger[] divideAndRemainder(BigInteger) + BigInteger flipBit(int) + BigInteger gcd(BigInteger) + int getLowestSetBit() + int intValueExact() + long longValueExact() + BigInteger max(BigInteger) + BigInteger min(BigInteger) + BigInteger mod(BigInteger) + BigInteger modInverse(BigInteger) + BigInteger modPow(BigInteger,BigInteger) + BigInteger multiply(BigInteger) + BigInteger negate() + BigInteger not() + BigInteger or(BigInteger) + BigInteger pow(int) + BigInteger remainder(BigInteger) + BigInteger setBit(int) + BigInteger shiftLeft(int) + BigInteger shiftRight(int) + short shortValueExact() + int signum() + BigInteger subtract(BigInteger) + boolean testBit(int) + byte[] toByteArray() + String toString(int) + BigInteger valueOf(long) + BigInteger xor(BigInteger) +} + +class MathContext -> java.math.MathContext extends Object { + MathContext DECIMAL128 + MathContext DECIMAL32 + MathContext DECIMAL64 + MathContext UNLIMITED + MathContext (int) + MathContext (int,RoundingMode) + int getPrecision() + RoundingMode getRoundingMode() +} + +#### Enums + +class RoundingMode -> java.math.RoundingMode extends Enum,Object { + RoundingMode CEILING + RoundingMode DOWN + RoundingMode FLOOR + RoundingMode HALF_DOWN + RoundingMode HALF_EVEN + RoundingMode HALF_UP + RoundingMode UNNECESSARY + RoundingMode UP + RoundingMode valueOf(String) + RoundingMode[] values() +} + diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.text.txt 
b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.text.txt new file mode 100644 index 00000000000..eac725cfb94 --- /dev/null +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.text.txt @@ -0,0 +1,486 @@ +# +# Licensed to Elasticsearch under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +# +# Painless definition file. This defines the hierarchy of classes, +# what methods and fields they have, etc. +# + +#### Interfaces + +class AttributedCharacterIterator -> java.text.AttributedCharacterIterator extends CharacterIterator { + Set getAllAttributeKeys() + def getAttribute(AttributedCharacterIterator.Attribute) + Map getAttributes() + int getRunLimit() + int getRunLimit(Set) + int getRunStart() + int getRunStart(Set) +} + +class CharacterIterator -> java.text.CharacterIterator { + char DONE + def clone() + char current() + char first() + int getBeginIndex() + int getEndIndex() + int getIndex() + char last() + char next() + char previous() + char setIndex(int) +} + +#### Classes + +class Annotation -> java.text.Annotation extends Object { + Annotation (Object) + def getValue() +} + +class AttributedCharacterIterator.Attribute -> java.text.AttributedCharacterIterator$Attribute extends Object { + AttributedCharacterIterator.Attribute INPUT_METHOD_SEGMENT + AttributedCharacterIterator.Attribute LANGUAGE + AttributedCharacterIterator.Attribute READING +} + +class AttributedString -> java.text.AttributedString extends Object { + AttributedString (String) + AttributedString (String,Map) + void addAttribute(AttributedCharacterIterator.Attribute,Object) + void addAttribute(AttributedCharacterIterator.Attribute,Object,int,int) + void addAttributes(Map,int,int) + AttributedCharacterIterator getIterator() + AttributedCharacterIterator getIterator(AttributedCharacterIterator.Attribute[]) + AttributedCharacterIterator getIterator(AttributedCharacterIterator.Attribute[],int,int) +} + +class Bidi -> java.text.Bidi extends Object { + int DIRECTION_DEFAULT_LEFT_TO_RIGHT + int DIRECTION_DEFAULT_RIGHT_TO_LEFT + int DIRECTION_LEFT_TO_RIGHT + int DIRECTION_RIGHT_TO_LEFT + Bidi (AttributedCharacterIterator) + Bidi (char[],int,byte[],int,int,int) + Bidi (String,int) + boolean baseIsLeftToRight() + Bidi createLineBidi(int,int) + int getBaseLevel() + int getLength() + int getLevelAt(int) + int getRunCount() + int getRunLevel(int) + int getRunLimit(int) + int getRunStart(int) + boolean isLeftToRight() + boolean isMixed() + boolean isRightToLeft() + void reorderVisually(byte[],int,Object[],int,int) + boolean requiresBidi(char[],int,int) +} + +class BreakIterator -> java.text.BreakIterator extends Object { + int DONE + def clone() + int current() + int first() + int following(int) + Locale[] getAvailableLocales() + BreakIterator getCharacterInstance() + BreakIterator
getCharacterInstance(Locale) + BreakIterator getLineInstance() + BreakIterator getLineInstance(Locale) + BreakIterator getSentenceInstance() + BreakIterator getSentenceInstance(Locale) + CharacterIterator getText() + BreakIterator getWordInstance() + BreakIterator getWordInstance(Locale) + boolean isBoundary(int) + int last() + int next() + int next(int) + int preceding(int) + int previous() + void setText(String) +} + +class ChoiceFormat -> java.text.ChoiceFormat extends NumberFormat,Format,Object { + ChoiceFormat (double[],String[]) + ChoiceFormat (String) + void applyPattern(String) + def[] getFormats() + double[] getLimits() + double nextDouble(double) + double nextDouble(double,boolean) + double previousDouble(double) + void setChoices(double[],String[]) + String toPattern() +} + +class CollationElementIterator -> java.text.CollationElementIterator extends Object { + int NULLORDER + int getMaxExpansion(int) + int getOffset() + int next() + int previous() + int primaryOrder(int) + void reset() + short secondaryOrder(int) + void setOffset(int) + void setText(String) + short tertiaryOrder(int) +} + +class CollationKey -> java.text.CollationKey extends Comparable,Object { + String getSourceString() + byte[] toByteArray() +} + +class Collator -> java.text.Collator extends Comparator,Object { + int CANONICAL_DECOMPOSITION + int FULL_DECOMPOSITION + int IDENTICAL + int NO_DECOMPOSITION + int PRIMARY + int SECONDARY + int TERTIARY + def clone() + boolean equals(String,String) + Locale[] getAvailableLocales() + CollationKey getCollationKey(String) + int getDecomposition() + Collator getInstance() + Collator getInstance(Locale) + int getStrength() + void setDecomposition(int) + void setStrength(int) +} + +class DateFormat -> java.text.DateFormat extends Format,Object { + int AM_PM_FIELD + int DATE_FIELD + int DAY_OF_WEEK_FIELD + int DAY_OF_WEEK_IN_MONTH_FIELD + int DAY_OF_YEAR_FIELD + int DEFAULT + int ERA_FIELD + int FULL + int HOUR_OF_DAY0_FIELD + int HOUR_OF_DAY1_FIELD + int HOUR0_FIELD + int HOUR1_FIELD + int LONG + int MEDIUM + int MILLISECOND_FIELD + int MINUTE_FIELD + int MONTH_FIELD + int SECOND_FIELD + int SHORT + int TIMEZONE_FIELD + int WEEK_OF_MONTH_FIELD + int WEEK_OF_YEAR_FIELD + int YEAR_FIELD + Locale[] getAvailableLocales() + Calendar getCalendar() + DateFormat getDateInstance() + DateFormat getDateInstance(int) + DateFormat getDateInstance(int,Locale) + DateFormat getDateTimeInstance() + DateFormat getDateTimeInstance(int,int) + DateFormat getDateTimeInstance(int,int,Locale) + DateFormat getInstance() + NumberFormat getNumberFormat() + DateFormat getTimeInstance() + DateFormat getTimeInstance(int) + DateFormat getTimeInstance(int,Locale) + TimeZone getTimeZone() + boolean isLenient() + Date parse(String) + Date parse(String,ParsePosition) + void setCalendar(Calendar) + void setLenient(boolean) + void setNumberFormat(NumberFormat) + void setTimeZone(TimeZone) +} + +class DateFormat.Field -> java.text.DateFormat$Field extends Format.Field,AttributedCharacterIterator.Attribute,Object { + DateFormat.Field AM_PM + DateFormat.Field DAY_OF_MONTH + DateFormat.Field DAY_OF_WEEK + DateFormat.Field DAY_OF_WEEK_IN_MONTH + DateFormat.Field DAY_OF_YEAR + DateFormat.Field ERA + DateFormat.Field HOUR_OF_DAY0 + DateFormat.Field HOUR_OF_DAY1 + DateFormat.Field HOUR0 + DateFormat.Field HOUR1 + DateFormat.Field MILLISECOND + DateFormat.Field MINUTE + DateFormat.Field MONTH + DateFormat.Field SECOND + DateFormat.Field TIME_ZONE + DateFormat.Field WEEK_OF_MONTH + DateFormat.Field WEEK_OF_YEAR + 
DateFormat.Field YEAR + int getCalendarField() + DateFormat.Field ofCalendarField(int) +} + +class DateFormatSymbols -> java.text.DateFormatSymbols extends Object { + DateFormatSymbols () + DateFormatSymbols (Locale) + def clone() + String[] getAmPmStrings() + Locale[] getAvailableLocales() + String[] getEras() + DateFormatSymbols getInstance() + DateFormatSymbols getInstance(Locale) + String getLocalPatternChars() + String[] getMonths() + String[] getShortMonths() + String[] getShortWeekdays() + String[] getWeekdays() + String[][] getZoneStrings() + int hashCode() + void setAmPmStrings(String[]) + void setEras(String[]) + void setLocalPatternChars(String) + void setMonths(String[]) + void setShortMonths(String[]) + void setShortWeekdays(String[]) + void setWeekdays(String[]) + void setZoneStrings(String[][]) +} + +class DecimalFormat -> java.text.DecimalFormat extends NumberFormat,Format,Object { + DecimalFormat () + DecimalFormat (String) + DecimalFormat (String,DecimalFormatSymbols) + void applyLocalizedPattern(String) + void applyPattern(String) + DecimalFormatSymbols getDecimalFormatSymbols() + int getGroupingSize() + int getMultiplier() + String getNegativePrefix() + String getNegativeSuffix() + String getPositivePrefix() + String getPositiveSuffix() + boolean isDecimalSeparatorAlwaysShown() + boolean isParseBigDecimal() + void setDecimalFormatSymbols(DecimalFormatSymbols) + void setDecimalSeparatorAlwaysShown(boolean) + void setGroupingSize(int) + void setMultiplier(int) + void setNegativePrefix(String) + void setNegativeSuffix(String) + void setPositivePrefix(String) + void setPositiveSuffix(String) + void setParseBigDecimal(boolean) + String toLocalizedPattern() + String toPattern() +} + +class DecimalFormatSymbols -> java.text.DecimalFormatSymbols extends Object { + DecimalFormatSymbols () + DecimalFormatSymbols (Locale) + def clone() + Locale[] getAvailableLocales() + Currency getCurrency() + String getCurrencySymbol() + char getDecimalSeparator() + char getDigit() + String getExponentSeparator() + char getGroupingSeparator() + String getInfinity() + DecimalFormatSymbols getInstance() + DecimalFormatSymbols getInstance(Locale) + String getInternationalCurrencySymbol() + char getMinusSign() + char getMonetaryDecimalSeparator() + String getNaN() + char getPatternSeparator() + char getPercent() + char getPerMill() + char getZeroDigit() + void setCurrency(Currency) + void setCurrencySymbol(String) + void setDecimalSeparator(char) + void setDigit(char) + void setExponentSeparator(String) + void setGroupingSeparator(char) + void setInfinity(String) + void setInternationalCurrencySymbol(String) + void setMinusSign(char) + void setMonetaryDecimalSeparator(char) + void setNaN(String) + void setPatternSeparator(char) + void setPercent(char) + void setPerMill(char) + void setZeroDigit(char) +} + +class FieldPosition -> java.text.FieldPosition extends Object { + FieldPosition (int) + FieldPosition (Format.Field,int) + int getBeginIndex() + int getEndIndex() + int getField() + Format.Field getFieldAttribute() + void setBeginIndex(int) + void setEndIndex(int) +} + +class Format -> java.text.Format extends Object { + def clone() + String format(Object) + StringBuffer format(Object,StringBuffer,FieldPosition) + AttributedCharacterIterator formatToCharacterIterator(Object) + Object parseObject(String) + Object parseObject(String,ParsePosition) +} + +class Format.Field -> java.text.Format$Field extends AttributedCharacterIterator.Attribute,Object { +} + +class MessageFormat -> 
java.text.MessageFormat extends Format,Object { + void applyPattern(String) + String format(String,Object[]) + Format[] getFormats() + Format[] getFormatsByArgumentIndex() + Locale getLocale() + Object[] parse(String) + Object[] parse(String,ParsePosition) + void setFormat(int,Format) + void setFormatByArgumentIndex(int,Format) + void setFormats(Format[]) + void setFormatsByArgumentIndex(Format[]) + void setLocale(Locale) + String toPattern() +} + +class MessageFormat.Field -> java.text.MessageFormat$Field extends Format.Field,AttributedCharacterIterator.Attribute,Object { + MessageFormat.Field ARGUMENT +} + +class Normalizer -> java.text.Normalizer extends Object { + boolean isNormalized(CharSequence,Normalizer.Form) + String normalize(CharSequence,Normalizer.Form) +} + +class NumberFormat -> java.text.NumberFormat extends Format,Object { + int FRACTION_FIELD + int INTEGER_FIELD + Locale[] getAvailableLocales() + Currency getCurrency() + NumberFormat getCurrencyInstance() + NumberFormat getCurrencyInstance(Locale) + NumberFormat getInstance() + NumberFormat getInstance(Locale) + NumberFormat getIntegerInstance() + NumberFormat getIntegerInstance(Locale) + int getMaximumFractionDigits() + int getMaximumIntegerDigits() + int getMinimumFractionDigits() + int getMinimumIntegerDigits() + NumberFormat getNumberInstance() + NumberFormat getNumberInstance(Locale) + NumberFormat getPercentInstance() + NumberFormat getPercentInstance(Locale) + RoundingMode getRoundingMode() + boolean isGroupingUsed() + boolean isParseIntegerOnly() + Number parse(String) + Number parse(String,ParsePosition) + void setCurrency(Currency) + void setGroupingUsed(boolean) + void setMaximumFractionDigits(int) + void setMaximumIntegerDigits(int) + void setMinimumFractionDigits(int) + void setMinimumIntegerDigits(int) + void setParseIntegerOnly(boolean) + void setRoundingMode(RoundingMode) +} + +class NumberFormat.Field -> java.text.NumberFormat$Field extends Format.Field,AttributedCharacterIterator.Attribute,Object { + NumberFormat.Field CURRENCY + NumberFormat.Field DECIMAL_SEPARATOR + NumberFormat.Field EXPONENT + NumberFormat.Field EXPONENT_SIGN + NumberFormat.Field EXPONENT_SYMBOL + NumberFormat.Field FRACTION + NumberFormat.Field GROUPING_SEPARATOR + NumberFormat.Field INTEGER + NumberFormat.Field PERCENT + NumberFormat.Field PERMILLE + NumberFormat.Field SIGN +} + +class ParsePosition -> java.text.ParsePosition extends Object { + ParsePosition (int) + int getErrorIndex() + int getIndex() + void setErrorIndex(int) + void setIndex(int) +} + +class RuleBasedCollator -> java.text.RuleBasedCollator extends Collator,Comparator,Object { + RuleBasedCollator (String) + CollationElementIterator getCollationElementIterator(String) + String getRules() +} + +class SimpleDateFormat -> java.text.SimpleDateFormat extends DateFormat,Format,Object { + SimpleDateFormat () + SimpleDateFormat (String) + SimpleDateFormat (String,Locale) + void applyLocalizedPattern(String) + void applyPattern(String) + Date get2DigitYearStart() + DateFormatSymbols getDateFormatSymbols() + void setDateFormatSymbols(DateFormatSymbols) + void set2DigitYearStart(Date) + String toLocalizedPattern() + String toPattern() +} + +class StringCharacterIterator -> java.text.StringCharacterIterator extends CharacterIterator,Object { + StringCharacterIterator (String) + StringCharacterIterator (String,int) + StringCharacterIterator (String,int,int,int) + void setText(String) +} + +#### Enums + +class Normalizer.Form -> java.text.Normalizer$Form extends Enum,Object { + 
Normalizer.Form NFC + Normalizer.Form NFD + Normalizer.Form NFKC + Normalizer.Form NFKD + Normalizer.Form valueOf(String) + Normalizer.Form[] values() +} + +#### Exceptions + +class ParseException -> java.text.ParseException extends Exception,Object { + ParseException (String,int) + int getErrorOffset() +} diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.util.function.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.util.function.txt new file mode 100644 index 00000000000..969a8d6fb46 --- /dev/null +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.util.function.txt @@ -0,0 +1,232 @@ +# +# Licensed to Elasticsearch under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +# +# Painless definition file. This defines the hierarchy of classes, +# what methods and fields they have, etc. +# + +#### Interfaces + +class BiConsumer -> java.util.function.BiConsumer { + void accept(def,def) + BiConsumer andThen(BiConsumer) +} + +class BiFunction -> java.util.function.BiFunction { + BiFunction andThen(Function) + def apply(def,def) +} + +class BinaryOperator -> java.util.function.BinaryOperator extends BiFunction { + BinaryOperator maxBy(Comparator) + BinaryOperator minBy(Comparator) +} + +class BiPredicate -> java.util.function.BiPredicate { + BiPredicate and(BiPredicate) + BiPredicate negate() + BiPredicate or(BiPredicate) + boolean test(def,def) +} + +class BooleanSupplier -> java.util.function.BooleanSupplier { + boolean getAsBoolean() +} + +class Consumer -> java.util.function.Consumer { + void accept(def) + Consumer andThen(Consumer) +} + +class DoubleBinaryOperator -> java.util.function.DoubleBinaryOperator { + double applyAsDouble(double,double) +} + +class DoubleConsumer -> java.util.function.DoubleConsumer { + void accept(double) + DoubleConsumer andThen(DoubleConsumer) +} + +class DoubleFunction -> java.util.function.DoubleFunction { + def apply(double) +} + +class DoublePredicate -> java.util.function.DoublePredicate { + DoublePredicate and(DoublePredicate) + DoublePredicate negate() + DoublePredicate or(DoublePredicate) + boolean test(double) +} + +class DoubleSupplier -> java.util.function.DoubleSupplier { + double getAsDouble() +} + +class DoubleToIntFunction -> java.util.function.DoubleToIntFunction { + int applyAsInt(double) +} + +class DoubleToLongFunction -> java.util.function.DoubleToLongFunction { + long applyAsLong(double) +} + +class DoubleUnaryOperator -> java.util.function.DoubleUnaryOperator { + DoubleUnaryOperator andThen(DoubleUnaryOperator) + double applyAsDouble(double) + DoubleUnaryOperator compose(DoubleUnaryOperator) + DoubleUnaryOperator identity() +} + +class Function -> java.util.function.Function { + Function andThen(Function) + def apply(def) + Function compose(Function) + Function 
identity() +} + +class IntBinaryOperator -> java.util.function.IntBinaryOperator { + int applyAsInt(int,int) +} + +class IntConsumer -> java.util.function.IntConsumer { + void accept(int) + IntConsumer andThen(IntConsumer) +} + +class IntFunction -> java.util.function.IntFunction { + def apply(int) +} + +class IntPredicate -> java.util.function.IntPredicate { + IntPredicate and(IntPredicate) + IntPredicate negate() + IntPredicate or(IntPredicate) + boolean test(int) +} + +class IntSupplier -> java.util.function.IntSupplier { + int getAsInt() +} + +class IntToDoubleFunction -> java.util.function.IntToDoubleFunction { + double applyAsDouble(int) +} + +class IntToLongFunction -> java.util.function.IntToLongFunction { + long applyAsLong(int) +} + +class IntUnaryOperator -> java.util.function.IntUnaryOperator { + IntUnaryOperator andThen(IntUnaryOperator) + int applyAsInt(int) + IntUnaryOperator compose(IntUnaryOperator) + IntUnaryOperator identity() +} + +class LongBinaryOperator -> java.util.function.LongBinaryOperator { + long applyAsLong(long,long) +} + +class LongConsumer -> java.util.function.LongConsumer { + void accept(long) + LongConsumer andThen(LongConsumer) +} + +class LongFunction -> java.util.function.LongFunction { + def apply(long) +} + +class LongPredicate -> java.util.function.LongPredicate { + LongPredicate and(LongPredicate) + LongPredicate negate() + LongPredicate or(LongPredicate) + boolean test(long) +} + +class LongSupplier -> java.util.function.LongSupplier { + long getAsLong() +} + +class LongToDoubleFunction -> java.util.function.LongToDoubleFunction { + double applyAsDouble(long) +} + +class LongToIntFunction -> java.util.function.LongToIntFunction { + int applyAsInt(long) +} + +class LongUnaryOperator -> java.util.function.LongUnaryOperator { + LongUnaryOperator andThen(LongUnaryOperator) + long applyAsLong(long) + LongUnaryOperator compose(LongUnaryOperator) + LongUnaryOperator identity() +} + +class ObjDoubleConsumer -> java.util.function.ObjDoubleConsumer { + void accept(def,double) +} + +class ObjIntConsumer -> java.util.function.ObjIntConsumer { + void accept(def,int) +} + +class ObjLongConsumer -> java.util.function.ObjLongConsumer { + void accept(def,long) +} + +class Predicate -> java.util.function.Predicate { + Predicate and(Predicate) + Predicate isEqual(def) + Predicate negate() + Predicate or(Predicate) + boolean test(def) +} + +class Supplier -> java.util.function.Supplier { + def get() +} + +class ToDoubleBiFunction -> java.util.function.ToDoubleBiFunction { + double applyAsDouble(def,def) +} + +class ToDoubleFunction -> java.util.function.ToDoubleFunction { + double applyAsDouble(def) +} + +class ToIntBiFunction -> java.util.function.ToIntBiFunction { + int applyAsInt(def,def) +} + +class ToIntFunction -> java.util.function.ToIntFunction { + int applyAsInt(def) +} + +class ToLongBiFunction -> java.util.function.ToLongBiFunction { + long applyAsLong(def,def) +} + +class ToLongFunction -> java.util.function.ToLongFunction { + long applyAsLong(def) +} + +class UnaryOperator -> java.util.function.UnaryOperator extends Function { + UnaryOperator identity() +} diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.util.stream.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.util.stream.txt new file mode 100644 index 00000000000..d24cf8c0424 --- /dev/null +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.util.stream.txt @@ -0,0 +1,273 @@ +# +# Licensed to Elasticsearch 
under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +# +# Painless definition file. This defines the hierarchy of classes, +# what methods and fields they have, etc. +# + +#### Interfaces + +class BaseStream -> java.util.stream.BaseStream { + void close() + boolean isParallel() + Iterator iterator() + BaseStream sequential() + Spliterator spliterator() + BaseStream unordered() +} + +class Collector -> java.util.stream.Collector { + BiConsumer accumulator() + Set characteristics() + BinaryOperator combiner() + Function finisher() + Collector of(Supplier,BiConsumer,BinaryOperator,Function,Collector.Characteristics[]) + Collector of(Supplier,BiConsumer,BinaryOperator,Collector.Characteristics[]) + Supplier supplier() +} + +class DoubleStream -> java.util.stream.DoubleStream extends BaseStream { + boolean allMatch(DoublePredicate) + boolean anyMatch(DoublePredicate) + OptionalDouble average() + Stream boxed() + DoubleStream.Builder builder() + def collect(Supplier,ObjDoubleConsumer,BiConsumer) + DoubleStream concat(DoubleStream,DoubleStream) + long count() + DoubleStream distinct() + DoubleStream empty() + DoubleStream filter(DoublePredicate) + OptionalDouble findAny() + OptionalDouble findFirst() + DoubleStream flatMap(DoubleFunction) + void forEach(DoubleConsumer) + void forEachOrdered(DoubleConsumer) + PrimitiveIterator.OfDouble iterator() + DoubleStream limit(long) + DoubleStream map(DoubleUnaryOperator) + IntStream mapToInt(DoubleToIntFunction) + LongStream mapToLong(DoubleToLongFunction) + Stream mapToObj(DoubleFunction) + OptionalDouble max() + OptionalDouble min() + boolean noneMatch(DoublePredicate) + DoubleStream of(double[]) + DoubleStream peek(DoubleConsumer) + OptionalDouble reduce(DoubleBinaryOperator) + double reduce(double,DoubleBinaryOperator) + DoubleStream sequential() + DoubleStream skip(long) + DoubleStream sorted() + Spliterator.OfDouble spliterator() + double sum() + DoubleSummaryStatistics summaryStatistics() + double[] toArray() +} + +class DoubleStream.Builder -> java.util.stream.DoubleStream$Builder extends DoubleConsumer { + DoubleStream.Builder add(double) + DoubleStream build() +} + +class IntStream -> java.util.stream.IntStream extends BaseStream { + boolean allMatch(IntPredicate) + boolean anyMatch(IntPredicate) + DoubleStream asDoubleStream() + LongStream asLongStream() + OptionalDouble average() + Stream boxed() + IntStream.Builder builder() + def collect(Supplier,ObjIntConsumer,BiConsumer) + IntStream concat(IntStream,IntStream) + long count() + IntStream distinct() + IntStream empty() + IntStream filter(IntPredicate) + OptionalInt findAny() + OptionalInt findFirst() + IntStream flatMap(IntFunction) + void forEach(IntConsumer) + void forEachOrdered(IntConsumer) + PrimitiveIterator.OfInt iterator() + IntStream limit(long) + IntStream map(IntUnaryOperator) + DoubleStream 
mapToDouble(IntToDoubleFunction) + LongStream mapToLong(IntToLongFunction) + Stream mapToObj(IntFunction) + OptionalInt max() + OptionalInt min() + boolean noneMatch(IntPredicate) + IntStream of(int[]) + IntStream peek(IntConsumer) + IntStream range(int,int) + IntStream rangeClosed(int,int) + OptionalInt reduce(IntBinaryOperator) + int reduce(int,IntBinaryOperator) + IntStream sequential() + IntStream skip(long) + IntStream sorted() + Spliterator.OfInt spliterator() + int sum() + IntSummaryStatistics summaryStatistics() + int[] toArray() +} + +class IntStream.Builder -> java.util.stream.IntStream$Builder extends IntConsumer { + IntStream.Builder add(int) + IntStream build() +} + +class LongStream -> java.util.stream.LongStream extends BaseStream { + boolean allMatch(LongPredicate) + boolean anyMatch(LongPredicate) + DoubleStream asDoubleStream() + OptionalDouble average() + Stream boxed() + LongStream.Builder builder() + def collect(Supplier,ObjLongConsumer,BiConsumer) + LongStream concat(LongStream,LongStream) + long count() + LongStream distinct() + LongStream empty() + LongStream filter(LongPredicate) + OptionalLong findAny() + OptionalLong findFirst() + LongStream flatMap(LongFunction) + void forEach(LongConsumer) + void forEachOrdered(LongConsumer) + PrimitiveIterator.OfLong iterator() + LongStream limit(long) + LongStream map(LongUnaryOperator) + DoubleStream mapToDouble(LongToDoubleFunction) + IntStream mapToInt(LongToIntFunction) + Stream mapToObj(LongFunction) + OptionalLong max() + OptionalLong min() + boolean noneMatch(LongPredicate) + LongStream of(long[]) + LongStream peek(LongConsumer) + LongStream range(long,long) + LongStream rangeClosed(long,long) + OptionalLong reduce(LongBinaryOperator) + long reduce(long,LongBinaryOperator) + LongStream sequential() + LongStream skip(long) + LongStream sorted() + Spliterator.OfLong spliterator() + long sum() + LongSummaryStatistics summaryStatistics() + long[] toArray() +} + +class LongStream.Builder -> java.util.stream.LongStream$Builder extends LongConsumer { + LongStream.Builder add(long) + LongStream build() +} + +class Stream -> java.util.stream.Stream extends BaseStream { + boolean allMatch(Predicate) + boolean anyMatch(Predicate) + Stream.Builder builder() + def collect(Collector) + def collect(Supplier,BiConsumer,BiConsumer) + Stream concat(Stream,Stream) + long count() + Stream distinct() + Stream empty() + Stream filter(Predicate) + Optional findAny() + Optional findFirst() + Stream flatMap(Function) + DoubleStream flatMapToDouble(Function) + IntStream flatMapToInt(Function) + LongStream flatMapToLong(Function) + void forEach(Consumer) + void forEachOrdered(Consumer) + Stream limit(long) + Stream map(Function) + DoubleStream mapToDouble(ToDoubleFunction) + IntStream mapToInt(ToIntFunction) + LongStream mapToLong(ToLongFunction) + Optional max(Comparator) + Optional min(Comparator) + boolean noneMatch(Predicate) + Stream of(def[]) + Stream peek(Consumer) + Optional reduce(BinaryOperator) + def reduce(def,BinaryOperator) + def reduce(def,BiFunction,BinaryOperator) + Stream skip(long) + Stream sorted() + Stream sorted(Comparator) + def[] toArray() + def[] toArray(IntFunction) +} + +class Stream.Builder -> java.util.stream.Stream$Builder extends Consumer { + Stream.Builder add(def) + Stream build() +} + +#### Classes + +class Collectors -> java.util.stream.Collectors extends Object { + Collector averagingDouble(ToDoubleFunction) + Collector averagingInt(ToIntFunction) + Collector averagingLong(ToLongFunction) + Collector 
collectingAndThen(Collector,Function) + Collector counting() + Collector groupingBy(Function) + Collector groupingBy(Function,Collector) + Collector groupingBy(Function,Supplier,Collector) + Collector joining() + Collector joining(CharSequence) + Collector joining(CharSequence,CharSequence,CharSequence) + Collector mapping(Function,Collector) + Collector maxBy(Comparator) + Collector minBy(Comparator) + Collector partitioningBy(Predicate) + Collector partitioningBy(Predicate,Collector) + Collector reducing(BinaryOperator) + Collector reducing(def,BinaryOperator) + Collector reducing(def,Function,BinaryOperator) + Collector summarizingDouble(ToDoubleFunction) + Collector summarizingInt(ToIntFunction) + Collector summarizingLong(ToLongFunction) + Collector summingDouble(ToDoubleFunction) + Collector summingInt(ToIntFunction) + Collector summingLong(ToLongFunction) + Collector toCollection(Supplier) + Collector toList() + Collector toMap(Function,Function) + Collector toMap(Function,Function,BinaryOperator) + Collector toMap(Function,Function,BinaryOperator,Supplier) + Collector toSet() +} + +#### Enums + +class Collector.Characteristics -> java.util.stream.Collector$Characteristics extends Enum,Object { + Collector.Characteristics CONCURRENT + Collector.Characteristics IDENTITY_FINISH + Collector.Characteristics UNORDERED + Collector.Characteristics valueOf(String) + Collector.Characteristics[] values() +} diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.util.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.util.txt new file mode 100644 index 00000000000..9be890f15d5 --- /dev/null +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.util.txt @@ -0,0 +1,1148 @@ +# +# Licensed to Elasticsearch under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +# +# Painless definition file. This defines the hierarchy of classes, +# what methods and fields they have, etc. 
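+# As an illustrative sketch (not itself part of the whitelist), the
+# java.util classes defined below allow script code such as:
+#
+#   List names = new ArrayList();
+#   names.add("beta");
+#   names.add("alpha");
+#   Collections.sort(names);
+#   return names.get(0); // "alpha"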
+# + +#### Interfaces + +class Collection -> java.util.Collection extends Iterable { + boolean add(def) + boolean addAll(Collection) + void clear() + boolean contains(def) + boolean containsAll(Collection) + boolean isEmpty() + boolean removeAll(Collection) + boolean removeIf(Predicate) + boolean retainAll(Collection) + int size() + Spliterator spliterator() + Stream stream() + def[] toArray() + def[] toArray(def[]) +} + +class Comparator -> java.util.Comparator { + int compare(def,def) + Comparator comparing(Function) + Comparator comparing(Function,Comparator) + Comparator comparingDouble(ToDoubleFunction) + Comparator comparingInt(ToIntFunction) + Comparator comparingLong(ToLongFunction) + boolean equals(Object) + Comparator naturalOrder() + Comparator nullsFirst(Comparator) + Comparator nullsLast(Comparator) + Comparator reversed() + Comparator reverseOrder() + Comparator thenComparing(Comparator) + Comparator thenComparing(Function,Comparator) + Comparator thenComparingDouble(ToDoubleFunction) + Comparator thenComparingInt(ToIntFunction) + Comparator thenComparingLong(ToLongFunction) +} + +class Deque -> java.util.Deque extends Queue,Collection,Iterable { + void addFirst(def) + void addLast(def) + Iterator descendingIterator() + def getFirst() + def getLast() + boolean offerFirst(def) + boolean offerLast(def) + def peekFirst() + def peekLast() + def pollFirst() + def pollLast() + def pop() + void push(def) + boolean remove(def) + def removeFirst() + boolean removeFirstOccurrence(def) + def removeLast() + boolean removeLastOccurrence(def) +} + +class Enumeration -> java.util.Enumeration { + boolean hasMoreElements() + def nextElement() +} + +class EventListener -> java.util.EventListener { +} + +class Formattable -> java.util.Formattable { + void formatTo(Formatter,int,int,int) +} + +class Iterator -> java.util.Iterator { + void forEachRemaining(Consumer) + boolean hasNext() + def next() + void remove() +} + +class List -> java.util.List extends Collection,Iterable { + void add(int,def) + boolean addAll(int,Collection) + boolean equals(Object) + def get(int) + int hashCode() + int indexOf(def) + int lastIndexOf(def) + ListIterator listIterator() + ListIterator listIterator(int) + def remove(int) + void replaceAll(UnaryOperator) + def set(int,def) + # getLength is an alias for size(), defined so a List's length can be read with `.length` like an array's
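+ # For example (illustrative only, with `myList` any List in a script),
+ # `myList.length` is then equivalent to `myList.size()`.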
+ int getLength/size() + void sort(Comparator) + List subList(int,int) +} + +class ListIterator -> java.util.ListIterator extends Iterator { + void add(def) + boolean hasPrevious() + int nextIndex() + int previousIndex() + void set(def) +} + +class Map -> java.util.Map { + void clear() + def compute(def,BiFunction) + def computeIfAbsent(def,Function) + def computeIfPresent(def,BiFunction) + boolean containsKey(def) + boolean containsValue(def) + Set entrySet() + boolean equals(Object) + void forEach(BiConsumer) + def get(def) + def getOrDefault(def,def) + boolean isEmpty() + Set keySet() + def merge(def,def,BiFunction) + def put(def,def) + void putAll(Map) + def putIfAbsent(def,def) + def remove(def) + boolean remove(def,def) + def replace(def,def) + boolean replace(def,def,def) + void replaceAll(BiFunction) + int size() + Collection values() +} + +class Map.Entry -> java.util.Map$Entry { + Comparator comparingByKey() + Comparator comparingByKey(Comparator) + Comparator comparingByValue() + Comparator comparingByValue(Comparator) + boolean equals(Object) + def getKey() + def getValue() + int hashCode() + def setValue(def) +} + +class NavigableMap -> java.util.NavigableMap extends SortedMap,Map { + Map.Entry ceilingEntry(def) + def ceilingKey(def) + NavigableSet descendingKeySet() + NavigableMap descendingMap() + Map.Entry firstEntry() + Map.Entry floorEntry(def) + def floorKey(def) + NavigableMap headMap(def,boolean) + Map.Entry higherEntry(def) + def higherKey(def) + Map.Entry lastEntry() + Map.Entry lowerEntry(def) + NavigableSet navigableKeySet() + Map.Entry pollFirstEntry() + Map.Entry pollLastEntry() + NavigableMap subMap(def,boolean,def,boolean) + NavigableMap tailMap(def,boolean) +} + +class NavigableSet -> java.util.NavigableSet extends SortedSet,Set,Collection,Iterable { + def ceiling(def) + Iterator descendingIterator() + NavigableSet descendingSet() + def floor(def) + NavigableSet headSet(def,boolean) + def higher(def) + def lower(def) + def pollFirst() + def pollLast() + NavigableSet subSet(def,boolean,def,boolean) + NavigableSet tailSet(def,boolean) +} + +class Observer -> java.util.Observer { + void update(Observable,Object) +} + +class PrimitiveIterator -> java.util.PrimitiveIterator extends Iterator { + void forEachRemaining(def) +} + +class PrimitiveIterator.OfDouble -> java.util.PrimitiveIterator$OfDouble extends PrimitiveIterator,Iterator { + Double next() + double nextDouble() +} + +class PrimitiveIterator.OfInt -> java.util.PrimitiveIterator$OfInt extends PrimitiveIterator,Iterator { + Integer next() + int nextInt() +} + +class PrimitiveIterator.OfLong -> java.util.PrimitiveIterator$OfLong extends PrimitiveIterator,Iterator { + Long next() + long nextLong() +} + +class Spliterator -> java.util.Spliterator { + int CONCURRENT + int DISTINCT + int IMMUTABLE + int NONNULL + int ORDERED + int SIZED + int SORTED + int SUBSIZED + int characteristics() + long estimateSize() + void forEachRemaining(Consumer) + Comparator getComparator() + long getExactSizeIfKnown() + boolean hasCharacteristics(int) + boolean tryAdvance(Consumer) + Spliterator trySplit() +} + +class Spliterator.OfPrimitive -> java.util.Spliterator$OfPrimitive extends Spliterator { + void forEachRemaining(def) + boolean tryAdvance(def) + Spliterator.OfPrimitive trySplit() +} + +class Spliterator.OfDouble -> java.util.Spliterator$OfDouble extends Spliterator.OfPrimitive,Spliterator { + Spliterator.OfDouble trySplit() +} + +class Spliterator.OfInt -> java.util.Spliterator$OfInt extends 
Spliterator.OfPrimitive,Spliterator { + Spliterator.OfInt trySplit() +} + +class Spliterator.OfLong -> java.util.Spliterator$OfLong extends Spliterator.OfPrimitive,Spliterator { + Spliterator.OfLong trySplit() +} + +class Queue -> java.util.Queue extends Collection,Iterable { + def element() + boolean offer(def) + def peek() + def poll() + def remove() +} + +class RandomAccess -> java.util.RandomAccess { +} + +class Set -> java.util.Set extends Collection,Iterable { + boolean equals(Object) + int hashCode() + boolean remove(def) +} + +class SortedMap -> java.util.SortedMap extends Map { + Comparator comparator() + def firstKey() + SortedMap headMap(def) + def lastKey() + SortedMap subMap(def,def) + SortedMap tailMap(def) +} + +class SortedSet -> java.util.SortedSet extends Set,Collection,Iterable { + Comparator comparator() + def first() + SortedSet headSet(def) + def last() + SortedSet subSet(def,def) + SortedSet tailSet(def) +} + +#### Classes + +class AbstractCollection -> java.util.AbstractCollection extends Collection,Iterable,Object { +} + +class AbstractList -> java.util.AbstractList extends AbstractCollection,List,Collection,Iterable,Object { +} + +class AbstractMap -> java.util.AbstractMap extends Map,Object { +} + +class AbstractMap.SimpleEntry -> java.util.AbstractMap$SimpleEntry extends Map.Entry,Object { + AbstractMap.SimpleEntry (def,def) + AbstractMap.SimpleEntry (Map.Entry) +} + +class AbstractMap.SimpleImmutableEntry -> java.util.AbstractMap$SimpleImmutableEntry extends Map.Entry,Object { + AbstractMap.SimpleImmutableEntry (def,def) + AbstractMap.SimpleImmutableEntry (Map.Entry) +} + +class AbstractQueue -> java.util.AbstractQueue extends AbstractCollection,Queue,Collection,Iterable,Object { +} + +class AbstractSequentialList -> java.util.AbstractSequentialList extends AbstractList,AbstractCollection,List,Collection,Iterable,Object { +} + +class AbstractSet -> java.util.AbstractSet extends AbstractCollection,Set,Collection,Iterable,Object { +} + +class ArrayDeque -> java.util.ArrayDeque extends AbstractCollection,Deque,Queue,Collection,Iterable,Object { + ArrayDeque () + ArrayDeque (Collection) + ArrayDeque clone() +} + +class ArrayList -> java.util.ArrayList extends AbstractList,AbstractCollection,List,RandomAccess,Collection,Iterable,Object { + ArrayList () + ArrayList (Collection) + def clone() + void trimToSize() +} + +class Arrays -> java.util.Arrays extends Object { + List asList(Object[]) + boolean deepEquals(Object[],Object[]) + int deepHashCode(Object[]) + String deepToString(Object[]) +} + +class Base64 -> java.util.Base64 extends Object { + Base64.Decoder getDecoder() + Base64.Encoder getEncoder() + Base64.Decoder getMimeDecoder() + Base64.Encoder getMimeEncoder() + Base64.Encoder getMimeEncoder(int,byte[]) + Base64.Decoder getUrlDecoder() + Base64.Encoder getUrlEncoder() +} + +class Base64.Decoder -> java.util.Base64$Decoder extends Object { + int decode(byte[],byte[]) + byte[] decode(String) +} + +class Base64.Encoder -> java.util.Base64$Encoder extends Object { + int encode(byte[],byte[]) + String encodeToString(byte[]) + Base64.Encoder withoutPadding() +} + +class BitSet -> java.util.BitSet extends Object { + BitSet () + BitSet (int) + void and(BitSet) + void andNot(BitSet) + int cardinality() + void clear() + void clear(int) + void clear(int,int) + def clone() + void flip(int) + void flip(int,int) + boolean intersects(BitSet) + boolean isEmpty() + int length() + int nextClearBit(int) + int nextSetBit(int) + void or(BitSet) + int previousClearBit(int) + int 
previousSetBit(int) + void set(int) + void set(int,int) + void set(int,int,boolean) + int size() + byte[] toByteArray() + long[] toLongArray() + BitSet valueOf(long[]) + void xor(BitSet) +} + +class Calendar -> java.util.Calendar extends Comparable,Object { + int ALL_STYLES + int AM + int AM_PM + int APRIL + int AUGUST + int DATE + int DAY_OF_MONTH + int DAY_OF_WEEK + int DAY_OF_WEEK_IN_MONTH + int DAY_OF_YEAR + int DECEMBER + int DST_OFFSET + int ERA + int FEBRUARY + int FIELD_COUNT + int FRIDAY + int HOUR + int HOUR_OF_DAY + int JANUARY + int JULY + int JUNE + int LONG + int LONG_FORMAT + int LONG_STANDALONE + int MARCH + int MAY + int MILLISECOND + int MINUTE + int MONDAY + int MONTH + int NARROW_FORMAT + int NARROW_STANDALONE + int NOVEMBER + int OCTOBER + int PM + int SATURDAY + int SECOND + int SEPTEMBER + int SHORT + int SHORT_FORMAT + int SHORT_STANDALONE + int SUNDAY + int THURSDAY + int TUESDAY + int UNDECIMBER + int WEDNESDAY + int WEEK_OF_MONTH + int WEEK_OF_YEAR + int YEAR + int ZONE_OFFSET + void add(int,int) + boolean after(Object) + boolean before(Object) + void clear() + void clear(int) + def clone() + int get(int) + int getActualMaximum(int) + int getActualMinimum(int) + Set getAvailableCalendarTypes() + Locale[] getAvailableLocales() + String getCalendarType() + String getDisplayName(int,int,Locale) + Map getDisplayNames(int,int,Locale) + int getFirstDayOfWeek() + int getGreatestMinimum(int) + Calendar getInstance() + Calendar getInstance(TimeZone) + Calendar getInstance(TimeZone,Locale) + int getLeastMaximum(int) + int getMaximum(int) + int getMinimalDaysInFirstWeek() + int getMinimum(int) + Date getTime() + long getTimeInMillis() + TimeZone getTimeZone() + int getWeeksInWeekYear() + int getWeekYear() + boolean isLenient() + boolean isSet(int) + boolean isWeekDateSupported() + void roll(int,int) + void set(int,int) + void set(int,int,int) + void set(int,int,int,int,int) + void set(int,int,int,int,int,int) + void setFirstDayOfWeek(int) + void setLenient(boolean) + void setMinimalDaysInFirstWeek(int) + void setTime(Date) + void setTimeInMillis(long) + void setTimeZone(TimeZone) + void setWeekDate(int,int,int) +} + +class Calendar.Builder -> java.util.Calendar$Builder extends Object { + Calendar.Builder () + Calendar build() + Calendar.Builder set(int,int) + Calendar.Builder setCalendarType(String) + Calendar.Builder setDate(int,int,int) + Calendar.Builder setFields(int[]) + Calendar.Builder setInstant(long) + Calendar.Builder setLenient(boolean) + Calendar.Builder setLocale(Locale) + Calendar.Builder setTimeOfDay(int,int,int) + Calendar.Builder setTimeOfDay(int,int,int,int) + Calendar.Builder setTimeZone(TimeZone) + Calendar.Builder setWeekDate(int,int,int) + Calendar.Builder setWeekDefinition(int,int) +} + +class Collections -> java.util.Collections extends Object { + List EMPTY_LIST + Map EMPTY_MAP + Set EMPTY_SET + boolean addAll(Collection,def[]) + Queue asLifoQueue(Deque) + int binarySearch(List,def) + int binarySearch(List,def,Comparator) + void copy(List,List) + boolean disjoint(Collection,Collection) + Enumeration emptyEnumeration() + Iterator emptyIterator() + List emptyList() + ListIterator emptyListIterator() + Map emptyMap() + NavigableMap emptyNavigableMap() + NavigableSet emptyNavigableSet() + Set emptySet() + SortedMap emptySortedMap() + SortedSet emptySortedSet() + Enumeration enumeration(Collection) + void fill(List,def) + int frequency(Collection,def) + int indexOfSubList(List,List) + int lastIndexOfSubList(List,List) + ArrayList list(Enumeration) + def 
max(Collection) + def max(Collection,Comparator) + def min(Collection) + def min(Collection,Comparator) + List nCopies(int,def) + Set newSetFromMap(Map) + boolean replaceAll(List,def,def) + void reverse(List) + Comparator reverseOrder() + Comparator reverseOrder(Comparator) + void rotate(List,int) + void shuffle(List) + void shuffle(List,Random) + Set singleton(def) + List singletonList(def) + Map singletonMap(def,def) + void sort(List) + void sort(List,Comparator) + void swap(List,int,int) + Collection unmodifiableCollection(Collection) + List unmodifiableList(List) + Map unmodifiableMap(Map) + NavigableMap unmodifiableNavigableMap(NavigableMap) + NavigableSet unmodifiableNavigableSet(NavigableSet) + Set unmodifiableSet(Set) + SortedMap unmodifiableSortedMap(SortedMap) + SortedSet unmodifiableSortedSet(SortedSet) +} + +class Currency -> java.util.Currency extends Object { + Set getAvailableCurrencies() + String getCurrencyCode() + int getDefaultFractionDigits() + String getDisplayName() + String getDisplayName(Locale) + Currency getInstance(String) + int getNumericCode() + String getSymbol() + String getSymbol(Locale) +} + +class Date -> java.util.Date extends Comparable,Object { + Date () + Date (long) + boolean after(Date) + boolean before(Date) + def clone() + long getTime() + void setTime(long) +} + +class Dictionary -> java.util.Dictionary extends Object { + Enumeration elements() + def get(def) + boolean isEmpty() + Enumeration keys() + def put(def,def) + def remove(def) + int size() +} + +class DoubleSummaryStatistics -> java.util.DoubleSummaryStatistics extends DoubleConsumer,Object { + DoubleSummaryStatistics () + void combine(DoubleSummaryStatistics) + double getAverage() + long getCount() + double getMax() + double getMin() + double getSum() +} + +class EventListenerProxy -> java.util.EventListenerProxy extends EventListener,Object { + EventListener getListener() +} + +class EventObject -> java.util.EventObject extends Object { + EventObject (Object) + Object getSource() +} + +class FormattableFlags -> java.util.FormattableFlags extends Object { + int ALTERNATE + int LEFT_JUSTIFY + int UPPERCASE +} + +class Formatter -> java.util.Formatter extends Object { + Formatter () + Formatter (Appendable) + Formatter (Appendable,Locale) + Formatter format(Locale,String,def[]) + Formatter format(String,def[]) + Locale locale() + Appendable out() +} + +class GregorianCalendar -> java.util.GregorianCalendar extends Calendar,Comparable,Object { + int AD + int BC + GregorianCalendar () + GregorianCalendar (int,int,int) + GregorianCalendar (int,int,int,int,int) + GregorianCalendar (int,int,int,int,int,int) + GregorianCalendar (TimeZone) + GregorianCalendar (TimeZone,Locale) + Date getGregorianChange() + boolean isLeapYear(int) + void setGregorianChange(Date) +} + +class HashMap -> java.util.HashMap extends AbstractMap,Map,Object { + HashMap () + HashMap (Map) + def clone() +} + +class HashSet -> java.util.HashSet extends AbstractSet,Set,Collection,Iterable,Object { + HashSet () + HashSet (Collection) + def clone() +} + +class Hashtable -> java.util.Hashtable extends Dictionary,Map,Object { + Hashtable () + Hashtable (Map) + def clone() +} + +class IdentityHashMap -> java.util.IdentityHashMap extends AbstractMap,Map,Object { + IdentityHashMap () + IdentityHashMap (Map) + def clone() +} + +class IntSummaryStatistics -> java.util.IntSummaryStatistics extends IntConsumer,Object { + IntSummaryStatistics () + void combine(IntSummaryStatistics) + double getAverage() + long getCount() + int getMax() + 
int getMin() + long getSum() +} + +class LinkedHashMap -> java.util.LinkedHashMap extends HashMap,AbstractMap,Map,Object { + LinkedHashMap () + LinkedHashMap (Map) +} + +class LinkedHashSet -> java.util.LinkedHashSet extends HashSet,AbstractSet,Set,AbstractCollection,Collection,Iterable,Object { + LinkedHashSet () + LinkedHashSet (Collection) +} + +class LinkedList -> java.util.LinkedList extends AbstractSequentialList,AbstractList,List,Deque,Queue,AbstractCollection,Collection,Iterable,Object { + LinkedList () + LinkedList (Collection) + def clone() +} + +class Locale -> java.util.Locale extends Object { + Locale CANADA + Locale CANADA_FRENCH + Locale CHINA + Locale CHINESE + Locale ENGLISH + Locale FRANCE + Locale FRENCH + Locale GERMAN + Locale GERMANY + Locale ITALIAN + Locale ITALY + Locale JAPAN + Locale JAPANESE + Locale KOREA + Locale KOREAN + Locale PRC + char PRIVATE_USE_EXTENSION + Locale ROOT + Locale SIMPLIFIED_CHINESE + Locale TAIWAN + Locale TRADITIONAL_CHINESE + Locale UK + char UNICODE_LOCALE_EXTENSION + Locale US + Locale (String) + Locale (String,String) + Locale (String,String,String) + def clone() + List filter(List,Collection) + List filterTags(List,Collection) + Locale forLanguageTag(String) + Locale[] getAvailableLocales() + String getCountry() + Locale getDefault() + Locale getDefault(Locale.Category) + String getDisplayCountry() + String getDisplayCountry(Locale) + String getDisplayLanguage() + String getDisplayLanguage(Locale) + String getDisplayName() + String getDisplayName(Locale) + String getDisplayScript() + String getDisplayScript(Locale) + String getDisplayVariant() + String getDisplayVariant(Locale) + String getExtension(char) + Set getExtensionKeys() + String getISO3Country() + String getISO3Language() + String[] getISOCountries() + String[] getISOLanguages() + String getLanguage() + String getScript() + Set getUnicodeLocaleAttributes() + Set getUnicodeLocaleKeys() + String getUnicodeLocaleType(String) + String getVariant() + boolean hasExtensions() + Locale lookup(List,Collection) + String lookupTag(List,Collection) + Locale stripExtensions() + String toLanguageTag() +} + +class Locale.Builder -> java.util.Locale$Builder extends Object { + Locale.Builder () + Locale.Builder addUnicodeLocaleAttribute(String) + Locale build() + Locale.Builder clear() + Locale.Builder clearExtensions() + Locale.Builder removeUnicodeLocaleAttribute(String) + Locale.Builder setExtension(char,String) + Locale.Builder setLanguage(String) + Locale.Builder setLanguageTag(String) + Locale.Builder setLocale(Locale) + Locale.Builder setRegion(String) + Locale.Builder setScript(String) + Locale.Builder setUnicodeLocaleKeyword(String,String) + Locale.Builder setVariant(String) +} + +class Locale.LanguageRange -> java.util.Locale$LanguageRange extends Object { + double MAX_WEIGHT + double MIN_WEIGHT + Locale.LanguageRange (String) + Locale.LanguageRange (String,double) + String getRange() + double getWeight() + List mapEquivalents(List,Map) + List parse(String) + List parse(String,Map) +} + +class LongSummaryStatistics -> java.util.LongSummaryStatistics extends LongConsumer,Object { + LongSummaryStatistics () + void combine(LongSummaryStatistics) + double getAverage() + long getCount() + long getMax() + long getMin() + long getSum() +} + +class Objects -> java.util.Objects extends Object { + int compare(def,def,Comparator) + boolean deepEquals(Object,Object) + boolean equals(Object,Object) + int hash(Object[]) + int hashCode(Object) + boolean isNull(Object) + boolean nonNull(Object) + 
def requireNonNull(def) + def requireNonNull(def,String) + String toString(Object) + String toString(Object,String) +} + +class Observable -> java.util.Observable extends Object { + Observable () + void addObserver(Observer) + int countObservers() + void deleteObserver(Observer) + void deleteObservers() + boolean hasChanged() + void notifyObservers() + void notifyObservers(Object) +} + +class Optional -> java.util.Optional extends Object { + Optional empty() + Optional filter(Predicate) + Optional flatMap(Function) + def get() + void ifPresent(Consumer) + boolean isPresent() + Optional map(Function) + Optional of(def) + Optional ofNullable(def) + def orElse(def) + def orElseGet(Supplier) + def orElseThrow(Supplier) +} + +class OptionalDouble -> java.util.OptionalDouble extends Object { + OptionalDouble empty() + double getAsDouble() + void ifPresent(DoubleConsumer) + boolean isPresent() + OptionalDouble of(double) + double orElse(double) + double orElseGet(DoubleSupplier) + double orElseThrow(Supplier) +} + +class OptionalInt -> java.util.OptionalInt extends Object { + OptionalInt empty() + int getAsInt() + void ifPresent(IntConsumer) + boolean isPresent() + OptionalInt of(int) + int orElse(int) + int orElseGet(IntSupplier) + int orElseThrow(Supplier) +} + +class OptionalLong -> java.util.OptionalLong extends Object { + OptionalLong empty() + long getAsLong() + void ifPresent(LongConsumer) + boolean isPresent() + OptionalLong of(long) + long orElse(long) + long orElseGet(LongSupplier) + long orElseThrow(Supplier) +} + +class PriorityQueue -> java.util.PriorityQueue extends AbstractQueue,Queue,AbstractCollection,Collection,Iterable,Object { + PriorityQueue () + PriorityQueue (Comparator) +} + +class Random -> java.util.Random extends Object { + Random () + Random (long) + DoubleStream doubles(long) + DoubleStream doubles(long,double,double) + IntStream ints(long) + IntStream ints(long,int,int) + LongStream longs(long) + LongStream longs(long,long,long) + boolean nextBoolean() + void nextBytes(byte[]) + double nextDouble() + float nextFloat() + double nextGaussian() + int nextInt() + int nextInt(int) + long nextLong() + void setSeed(long) +} + +class SimpleTimeZone -> java.util.SimpleTimeZone extends TimeZone,Object { + int STANDARD_TIME + int UTC_TIME + int WALL_TIME + SimpleTimeZone (int,String) + SimpleTimeZone (int,String,int,int,int,int,int,int,int,int) + SimpleTimeZone (int,String,int,int,int,int,int,int,int,int,int) + SimpleTimeZone (int,String,int,int,int,int,int,int,int,int,int,int,int) + int getDSTSavings() + void setDSTSavings(int) + void setEndRule(int,int,int) + void setEndRule(int,int,int,int) + void setEndRule(int,int,int,int,boolean) + void setStartRule(int,int,int) + void setStartRule(int,int,int,int) + void setStartRule(int,int,int,int,boolean) + void setStartYear(int) +} + +class Spliterators -> java.util.Spliterators extends Object { + Spliterator.OfDouble emptyDoubleSpliterator() + Spliterator.OfInt emptyIntSpliterator() + Spliterator.OfLong emptyLongSpliterator() + Spliterator emptySpliterator() + Iterator iterator(Spliterator) + Spliterator spliterator(Collection,int) + Spliterator spliterator(Iterator,long,int) + Spliterator spliteratorUnknownSize(Iterator,int) +} + +class Stack -> java.util.Stack extends Vector,AbstractList,List,AbstractCollection,Collection,Iterable,RandomAccess,Object { + Stack () + def push(def) + def pop() + def peek() + boolean empty() + int search(def) +} + +class StringJoiner -> java.util.StringJoiner extends Object { + StringJoiner 
(CharSequence) + StringJoiner (CharSequence,CharSequence,CharSequence) + StringJoiner add(CharSequence) + int length() + StringJoiner merge(StringJoiner) + StringJoiner setEmptyValue(CharSequence) +} + +class StringTokenizer -> java.util.StringTokenizer extends Enumeration,Object { + StringTokenizer (String) + StringTokenizer (String,String) + StringTokenizer (String,String,boolean) + int countTokens() + boolean hasMoreTokens() + String nextToken() + String nextToken(String) +} + +class TimeZone -> java.util.TimeZone extends Object { + int LONG + int SHORT + def clone() + String[] getAvailableIDs() + String[] getAvailableIDs(int) + TimeZone getDefault() + String getDisplayName() + String getDisplayName(boolean,int) + String getDisplayName(boolean,int,Locale) + String getDisplayName(Locale) + int getDSTSavings() + String getID() + int getOffset(int,int,int,int,int,int) + int getOffset(long) + int getRawOffset() + TimeZone getTimeZone(String) + boolean hasSameRules(TimeZone) + boolean inDaylightTime(Date) + boolean observesDaylightTime() + boolean useDaylightTime() +} + +class TreeMap -> java.util.TreeMap extends AbstractMap,NavigableMap,SortedMap,Map,Object { + TreeMap () + TreeMap (Comparator) + def clone() +} + +class TreeSet -> java.util.TreeSet extends AbstractSet,NavigableSet,SortedSet,Set,AbstractCollection,Collection,Iterable,Object { + TreeSet () + TreeSet (Comparator) + def clone() +} + +class UUID -> java.util.UUID extends Comparable,Object { + UUID (long,long) + int clockSequence() + UUID fromString(String) + long getLeastSignificantBits() + long getMostSignificantBits() + UUID nameUUIDFromBytes(byte[]) + long node() + long timestamp() + int variant() + int version() +} + +class Vector -> java.util.Vector extends AbstractList,List,AbstractCollection,Collection,Iterable,RandomAccess,Object { + Vector () + Vector (Collection) + void addElement(def) + void copyInto(Object[]) + def elementAt(int) + Enumeration elements() + def firstElement() + void insertElementAt(def,int) + def lastElement() + int lastIndexOf(def,int) + void removeAllElements() + boolean removeElement(def) + void removeElementAt(int) + void setElementAt(def,int) + def clone() +} + +#### Enums + +class Formatter.BigDecimalLayoutForm -> java.util.Formatter$BigDecimalLayoutForm extends Enum,Comparable,Object { + Formatter.BigDecimalLayoutForm DECIMAL_FLOAT + Formatter.BigDecimalLayoutForm SCIENTIFIC +} + +class Locale.Category -> java.util.Locale$Category extends Enum,Comparable,Object { + Locale.Category DISPLAY + Locale.Category FORMAT + Locale.Category valueOf(String) + Locale.Category[] values() +} + +class Locale.FilteringMode -> java.util.Locale$FilteringMode extends Enum,Comparable,Object { + Locale.FilteringMode AUTOSELECT_FILTERING + Locale.FilteringMode EXTENDED_FILTERING + Locale.FilteringMode IGNORE_EXTENDED_RANGES + Locale.FilteringMode MAP_EXTENDED_RANGES + Locale.FilteringMode REJECT_EXTENDED_RANGES + Locale.FilteringMode valueOf(String) + Locale.FilteringMode[] values() +} + +#### Exceptions + +class ConcurrentModificationException -> java.util.ConcurrentModificationException extends RuntimeException,Exception { + ConcurrentModificationException () + ConcurrentModificationException (String) +} + +class DuplicateFormatFlagsException -> java.util.DuplicateFormatFlagsException extends IllegalFormatException,IllegalArgumentException,RuntimeException,Exception { + DuplicateFormatFlagsException (String) + String getFlags() +} + +class EmptyStackException -> java.util.EmptyStackException extends 
RuntimeException,Exception { + EmptyStackException () +} + +class FormatFlagsConversionMismatchException -> java.util.FormatFlagsConversionMismatchException extends IllegalFormatException,IllegalArgumentException,RuntimeException,Exception { + FormatFlagsConversionMismatchException (String,char) + char getConversion() + String getFlags() +} + +class FormatterClosedException -> java.util.FormatterClosedException extends IllegalStateException,RuntimeException,Exception { + FormatterClosedException () +} + +class IllegalFormatCodePointException -> java.util.IllegalFormatCodePointException extends IllegalFormatException,IllegalArgumentException,RuntimeException,Exception { + IllegalFormatCodePointException (int) + int getCodePoint() +} + +class IllegalFormatConversionException -> java.util.IllegalFormatConversionException extends IllegalFormatException,IllegalArgumentException,RuntimeException,Exception { + char getConversion() +} + +class IllegalFormatException -> java.util.IllegalFormatException extends IllegalArgumentException,RuntimeException,Exception { +} + +class IllegalFormatFlagsException -> java.util.IllegalFormatFlagsException extends IllegalFormatException,IllegalArgumentException,RuntimeException,Exception { + IllegalFormatFlagsException (String) + String getFlags() +} + +class IllegalFormatPrecisionException -> java.util.IllegalFormatPrecisionException extends IllegalFormatException,IllegalArgumentException,RuntimeException,Exception { + IllegalFormatPrecisionException (int) + int getPrecision() +} + +class IllegalFormatWidthException -> java.util.IllegalFormatWidthException extends IllegalFormatException,IllegalArgumentException,RuntimeException,Exception { + IllegalFormatWidthException (int) + int getWidth() +} + +class IllformedLocaleException -> java.util.IllformedLocaleException extends RuntimeException,Exception { + IllformedLocaleException () + IllformedLocaleException (String) + IllformedLocaleException (String,int) + int getErrorIndex() +} + +class InputMismatchException -> java.util.InputMismatchException extends NoSuchElementException,RuntimeException,Exception { + InputMismatchException () + InputMismatchException (String) +} + +class MissingFormatArgumentException -> java.util.MissingFormatArgumentException extends IllegalFormatException,IllegalArgumentException,RuntimeException,Exception { + MissingFormatArgumentException (String) + String getFormatSpecifier() +} + +class MissingFormatWidthException -> java.util.MissingFormatWidthException extends IllegalFormatException,IllegalArgumentException,RuntimeException,Exception { + MissingFormatWidthException (String) + String getFormatSpecifier() +} + +class MissingResourceException -> java.util.MissingResourceException extends RuntimeException,Exception { + MissingResourceException (String,String,String) + String getClassName() + String getKey() +} + +class NoSuchElementException -> java.util.NoSuchElementException extends RuntimeException,Exception { + NoSuchElementException () + NoSuchElementException (String) +} + +class TooManyListenersException -> java.util.TooManyListenersException extends Exception { + TooManyListenersException () + TooManyListenersException (String) +} + +class UnknownFormatConversionException -> java.util.UnknownFormatConversionException extends IllegalFormatException,IllegalArgumentException,RuntimeException,Exception { + UnknownFormatConversionException (String) + String getConversion() +} + +class UnknownFormatFlagsException -> java.util.UnknownFormatFlagsException extends 
IllegalFormatException,IllegalArgumentException,RuntimeException,Exception { + UnknownFormatFlagsException (String) + String getFlags() +} diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.txt new file mode 100644 index 00000000000..a4af7c318ec --- /dev/null +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.txt @@ -0,0 +1,127 @@ +# +# Licensed to Elasticsearch under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +# +# Painless definition file. This defines the hierarchy of classes, +# what methods and fields they have, etc. +# + +#### Primitive types + +class void -> void { +} + +class boolean -> boolean { +} + +class byte -> byte { +} + +class short -> short { +} + +class char -> char { +} + +class int -> int { +} + +class long -> long { +} + +class float -> float { +} + +class double -> double { +} + +class def -> java.lang.Object { + boolean equals(Object) + int hashCode() + String toString() +} + +#### ES Scripting API + +class GeoPoint -> org.elasticsearch.common.geo.GeoPoint extends Object { + double getLat() + double getLon() +} + +class Strings -> org.elasticsearch.index.fielddata.ScriptDocValues$Strings extends List,Collection,Iterable,Object { + String getValue() + List getValues() +} + +class Longs -> org.elasticsearch.index.fielddata.ScriptDocValues$Longs extends List,Collection,Iterable,Object { + long getValue() + List getValues() +} + +class Doubles -> org.elasticsearch.index.fielddata.ScriptDocValues$Doubles extends List,Collection,Iterable,Object { + double getValue() + List getValues() +} + +class GeoPoints -> org.elasticsearch.index.fielddata.ScriptDocValues$GeoPoints extends List,Collection,Iterable,Object { + GeoPoint getValue() + List getValues() + double getLat() + double getLon() + double[] getLats() + double[] getLons() + + # geo distance functions... so many... 
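Before the long run of distance helpers announced by the comment above, the file format is worth spelling out: each block in these definition files maps the class name a script sees to the Java class that backs it, lists the Painless-visible supertypes after `extends`, and then whitelists one constructor, method, or field signature per line. A minimal sketch of how a class-header line decomposes; the regex and class name here are illustrative assumptions, not Painless's actual definition loader, which is not part of this patch:

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Illustrative only: decomposes one whitelist class-header line.
public class WhitelistHeaderDemo {
    private static final Pattern CLASS_HEADER = Pattern.compile(
            "class\\s+(\\S+)\\s+->\\s+(\\S+?)(?:\\s+extends\\s+([^\\s{]+))?\\s*\\{");

    public static void main(String[] args) {
        Matcher m = CLASS_HEADER.matcher(
                "class GeoPoint -> org.elasticsearch.common.geo.GeoPoint extends Object {");
        if (m.matches()) {
            System.out.println(m.group(1)); // GeoPoint: the name scripts use
            System.out.println(m.group(2)); // org.elasticsearch.common.geo.GeoPoint
            System.out.println(m.group(3)); // Object: comma-separated supertypes
        }
    }
}
```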
+ double factorDistance(double,double) + double factorDistanceWithDefault(double,double,double) + double factorDistance02(double,double) + double factorDistance13(double,double) + double arcDistance(double,double) + double arcDistanceWithDefault(double,double,double) + double arcDistanceInKm(double,double) + double arcDistanceInKmWithDefault(double,double,double) + double arcDistanceInMiles(double,double) + double arcDistanceInMilesWithDefault(double,double,double) + double distance(double,double) + double distanceWithDefault(double,double,double) + double distanceInKm(double,double) + double distanceInKmWithDefault(double,double,double) + double distanceInMiles(double,double) + double distanceInMilesWithDefault(double,double,double) + double geohashDistance(String) + double geohashDistanceInKm(String) + double geohashDistanceInMiles(String) +} + +# for testing. +# currently FeatureTest exposes overloaded constructor, field load store, and overloaded static methods +class FeatureTest -> org.elasticsearch.painless.FeatureTest extends Object { + FeatureTest () + FeatureTest (int,int) + int getX() + int getY() + void setX(int) + void setY(int) + boolean overloadedStatic() + boolean overloadedStatic(boolean) +} + +# currently needed internally +class Executable -> org.elasticsearch.painless.Executable { +} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ArrayTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ArrayTests.java index 2036c4fd04c..8dabacdd5f9 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ArrayTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ArrayTests.java @@ -34,7 +34,7 @@ public class ArrayTests extends ScriptTestCase { assertArrayLength(10, new Integer[10]); assertArrayLength(11, new String[11][2]); } - + private void assertArrayLength(int length, Object array) throws Throwable { assertEquals(length, (int) Def.arrayLengthGetter(array.getClass()).invoke(array)); } @@ -43,36 +43,36 @@ public class ArrayTests extends ScriptTestCase { assertEquals(5, exec("def x = new int[5]; return x.length")); assertEquals(5, exec("def x = new int[4]; x[0] = 5; return x[0];")); } - + public void testArrayLoadStoreString() { assertEquals(5, exec("def x = new String[5]; return x.length")); assertEquals("foobar", exec("def x = new String[4]; x[0] = 'foobar'; return x[0];")); } - + public void testArrayLoadStoreDef() { assertEquals(5, exec("def x = new def[5]; return x.length")); assertEquals(5, exec("def x = new def[4]; x[0] = 5; return x[0];")); } - + public void testArrayCompoundInt() { assertEquals(6, exec("int[] x = new int[5]; x[0] = 5; x[0]++; return x[0];")); } - + public void testArrayCompoundDef() { assertEquals(6, exec("def x = new int[5]; x[0] = 5; x[0]++; return x[0];")); } - + public void testJacksCrazyExpression1() { assertEquals(1, exec("int x; def[] y = new def[1]; x = y[0] = 1; return x;")); } - + public void testJacksCrazyExpression2() { assertEquals(1, exec("int x; def y = new def[1]; x = y[0] = 1; return x;")); } - + public void testForLoop() { assertEquals(999*1000/2, exec("def a = new int[1000]; for (int x = 0; x < a.length; x++) { a[x] = x; } "+ "int total = 0; for (int x = 0; x < a.length; x++) { total += a[x]; } return total;")); } - + } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java index af067d15252..043b614cadd 100644 --- 
a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java @@ -19,15 +19,16 @@ package org.elasticsearch.painless; +import java.util.Arrays; +import java.util.Collections; + public class BasicAPITests extends ScriptTestCase { public void testListIterator() { assertEquals(3, exec("List x = new ArrayList(); x.add(2); x.add(3); x.add(-2); Iterator y = x.iterator(); " + "int total = 0; while (y.hasNext()) total += y.next(); return total;")); - assertEquals(3, exec("List x = new ArrayList(); x.add(2); x.add(3); x.add(-2); Iterator y = x.iterator(); " + - "int total = 0; while (y.hasNext()) total += (int)y.next(); return total;")); - assertEquals("abc", exec("List x = new ArrayList(); x.add(\"a\"); x.add(\"b\"); x.add(\"c\"); " + - "Iterator y = x.iterator(); String total = \"\"; while (y.hasNext()) total += y.next(); return total;")); + assertEquals("abc", exec("List x = new ArrayList(); x.add(\"a\"); x.add(\"b\"); x.add(\"c\"); " + + "Iterator y = x.iterator(); String total = \"\"; while (y.hasNext()) total += y.next(); return total;")); assertEquals(3, exec("def x = new ArrayList(); x.add(2); x.add(3); x.add(-2); def y = x.iterator(); " + "def total = 0; while (y.hasNext()) total += y.next(); return total;")); } @@ -35,10 +36,8 @@ public class BasicAPITests extends ScriptTestCase { public void testSetIterator() { assertEquals(3, exec("Set x = new HashSet(); x.add(2); x.add(3); x.add(-2); Iterator y = x.iterator(); " + "int total = 0; while (y.hasNext()) total += y.next(); return total;")); - assertEquals(3, exec("Set x = new HashSet(); x.add(2); x.add(3); x.add(-2); Iterator y = x.iterator(); " + - "int total = 0; while (y.hasNext()) total += (int)y.next(); return total;")); - assertEquals("abc", exec("Set x = new HashSet(); x.add(\"a\"); x.add(\"b\"); x.add(\"c\"); " + - "Iterator y = x.iterator(); String total = \"\"; while (y.hasNext()) total += y.next(); return total;")); + assertEquals("abc", exec("Set x = new HashSet(); x.add(\"a\"); x.add(\"b\"); x.add(\"c\"); " + + "Iterator y = x.iterator(); String total = \"\"; while (y.hasNext()) total += y.next(); return total;")); assertEquals(3, exec("def x = new HashSet(); x.add(2); x.add(3); x.add(-2); def y = x.iterator(); " + "def total = 0; while (y.hasNext()) total += (int)y.next(); return total;")); } @@ -49,41 +48,53 @@ public class BasicAPITests extends ScriptTestCase { assertEquals(3, exec("Map x = new HashMap(); x.put(2, 2); x.put(3, 3); x.put(-2, -2); Iterator y = x.values().iterator(); " + "int total = 0; while (y.hasNext()) total += (int)y.next(); return total;")); } - + /** Test loads and stores with a map */ public void testMapLoadStore() { assertEquals(5, exec("def x = new HashMap(); x.abc = 5; return x.abc;")); assertEquals(5, exec("def x = new HashMap(); x['abc'] = 5; return x['abc'];")); } - + /** Test loads and stores with a list */ public void testListLoadStore() { assertEquals(5, exec("def x = new ArrayList(); x.add(3); x.0 = 5; return x.0;")); assertEquals(5, exec("def x = new ArrayList(); x.add(3); x[0] = 5; return x[0];")); } - + /** Test shortcut for getters with isXXXX */ public void testListEmpty() { assertEquals(true, exec("def x = new ArrayList(); return x.empty;")); assertEquals(true, exec("def x = new HashMap(); return x.empty;")); } - + /** Test list method invocation */ public void testListGet() { assertEquals(5, exec("def x = new ArrayList(); x.add(5); return x.get(0);")); assertEquals(5, exec("def 
x = new ArrayList(); x.add(5); def index = 0; return x.get(index);")); } - + public void testListAsArray() { assertEquals(1, exec("def x = new ArrayList(); x.add(5); return x.length")); assertEquals(5, exec("def x = new ArrayList(); x.add(5); return x[0]")); assertEquals(1, exec("List x = new ArrayList(); x.add('Hallo'); return x.length")); - assertEquals(1, exec("List x = new ArrayList(); x.add('Hallo'); return x.length")); - assertEquals(1, exec("List x = new ArrayList(); x.add('Hallo'); return x.length")); } - + public void testDefAssignments() { assertEquals(2, exec("int x; def y = 2.0; x = (int)y;")); } + + public void testInternalBoxing() { + assertBytecodeExists("def x = true", "INVOKESTATIC java/lang/Boolean.valueOf (Z)Ljava/lang/Boolean;"); + assertBytecodeExists("def x = (byte)1", "INVOKESTATIC java/lang/Byte.valueOf (B)Ljava/lang/Byte;"); + assertBytecodeExists("def x = (short)1", "INVOKESTATIC java/lang/Short.valueOf (S)Ljava/lang/Short;"); + assertBytecodeExists("def x = (char)1", "INVOKESTATIC java/lang/Character.valueOf (C)Ljava/lang/Character;"); + assertBytecodeExists("def x = 1", "INVOKESTATIC java/lang/Integer.valueOf (I)Ljava/lang/Integer;"); + assertBytecodeExists("def x = 1L", "INVOKESTATIC java/lang/Long.valueOf (J)Ljava/lang/Long;"); + assertBytecodeExists("def x = 1F", "INVOKESTATIC java/lang/Float.valueOf (F)Ljava/lang/Float;"); + assertBytecodeExists("def x = 1D", "INVOKESTATIC java/lang/Double.valueOf (D)Ljava/lang/Double;"); + } + void testStream() { + assertEquals(11, exec("params.list.stream().sum()", Collections.singletonMap("list", Arrays.asList(1,2,3,5)))); + } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java index 2fb676bf299..2a8f3674ac1 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java @@ -96,15 +96,15 @@ public class BasicExpressionTests extends ScriptTestCase { } /** - * Test boxed objects in various places + * Test boxed def objects in various places */ public void testBoxing() { // return assertEquals(4, exec("return params.get(\"x\");", Collections.singletonMap("x", 4))); // assignment - assertEquals(4, exec("int y = (Integer)params.get(\"x\"); return y;", Collections.singletonMap("x", 4))); + assertEquals(4, exec("int y = params.get(\"x\"); return y;", Collections.singletonMap("x", 4))); // comparison - assertEquals(true, exec("return 5 > (Integer)params.get(\"x\");", Collections.singletonMap("x", 4))); + assertEquals(true, exec("return 5 > params.get(\"x\");", Collections.singletonMap("x", 4))); } public void testBool() { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java index f0022e6bcf1..0d6a54b515b 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java @@ -135,7 +135,7 @@ public class BasicStatementTests extends ScriptTestCase { assertEquals(2.0, exec("double a = 2; return a;")); assertEquals(false, exec("boolean a = false; return a;")); assertEquals("string", exec("String a = \"string\"; return a;")); - assertEquals(HashMap.class, exec("Map a = new HashMap(); return 
a;").getClass()); + assertEquals(HashMap.class, exec("Map a = new HashMap(); return a;").getClass()); assertEquals(byte[].class, exec("byte[] a = new byte[1]; return a;").getClass()); assertEquals(short[].class, exec("short[] a = new short[1]; return a;").getClass()); @@ -146,7 +146,7 @@ public class BasicStatementTests extends ScriptTestCase { assertEquals(double[].class, exec("double[] a = new double[1]; return a;").getClass()); assertEquals(boolean[].class, exec("boolean[] a = new boolean[1]; return a;").getClass()); assertEquals(String[].class, exec("String[] a = new String[1]; return a;").getClass()); - assertEquals(Map[].class, exec("Map[] a = new Map[1]; return a;").getClass()); + assertEquals(Map[].class, exec("Map[] a = new Map[1]; return a;").getClass()); assertEquals(byte[][].class, exec("byte[][] a = new byte[1][2]; return a;").getClass()); assertEquals(short[][][].class, exec("short[][][] a = new short[1][2][3]; return a;").getClass()); @@ -157,7 +157,7 @@ public class BasicStatementTests extends ScriptTestCase { assertEquals(double[][][][].class, exec("double[][][][] a = new double[1][2][3][4]; return a;").getClass()); assertEquals(boolean[][][][][].class, exec("boolean[][][][][] a = new boolean[1][2][3][4][5]; return a;").getClass()); assertEquals(String[][].class, exec("String[][] a = new String[1][2]; return a;").getClass()); - assertEquals(Map[][][].class, exec("Map[][][] a = new Map[1][2][3]; return a;").getClass()); + assertEquals(Map[][][].class, exec("Map[][][] a = new Map[1][2][3]; return a;").getClass()); } public void testContinueStatement() { @@ -174,6 +174,6 @@ public class BasicStatementTests extends ScriptTestCase { assertEquals(5, exec("int x = 5; return x;")); assertEquals(4, exec("int[] x = new int[2]; x[1] = 4; return x[1];")); assertEquals(5, ((short[])exec("short[] s = new short[3]; s[1] = 5; return s;"))[1]); - assertEquals(10, ((Map)exec("Map s = new HashMap< String , Object >(); s.put(\"x\", 10); return s;")).get("x")); + assertEquals(10, ((Map)exec("Map s = new HashMap(); s.put(\"x\", 10); return s;")).get("x")); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/CompoundAssignmentTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/CompoundAssignmentTests.java index d54b976d65d..03593116538 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/CompoundAssignmentTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/CompoundAssignmentTests.java @@ -230,18 +230,18 @@ public class CompoundAssignmentTests extends ScriptTestCase { assertEquals(false, exec("boolean x = true; x &= false; return x;")); assertEquals(false, exec("boolean x = false; x &= true; return x;")); assertEquals(false, exec("boolean x = false; x &= false; return x;")); - assertEquals(true, exec("Boolean x = true; x &= true; return x;")); - assertEquals(false, exec("Boolean x = true; x &= false; return x;")); - assertEquals(false, exec("Boolean x = false; x &= true; return x;")); - assertEquals(false, exec("Boolean x = false; x &= false; return x;")); + assertEquals(true, exec("def x = true; x &= true; return x;")); + assertEquals(false, exec("def x = true; x &= false; return x;")); + assertEquals(false, exec("def x = false; x &= true; return x;")); + assertEquals(false, exec("def x = false; x &= false; return x;")); assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] &= true; return x[0];")); assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] &= 
false; return x[0];")); assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] &= true; return x[0];")); assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] &= false; return x[0];")); - assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] &= true; return x[0];")); - assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] &= false; return x[0];")); - assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] &= true; return x[0];")); - assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] &= false; return x[0];")); + assertEquals(true, exec("def[] x = new def[1]; x[0] = true; x[0] &= true; return x[0];")); + assertEquals(false, exec("def[] x = new def[1]; x[0] = true; x[0] &= false; return x[0];")); + assertEquals(false, exec("def[] x = new def[1]; x[0] = false; x[0] &= true; return x[0];")); + assertEquals(false, exec("def[] x = new def[1]; x[0] = false; x[0] &= false; return x[0];")); // byte assertEquals((byte) (13 & 14), exec("byte x = 13; x &= 14; return x;")); @@ -261,18 +261,18 @@ public class CompoundAssignmentTests extends ScriptTestCase { assertEquals(true, exec("boolean x = true; x |= false; return x;")); assertEquals(true, exec("boolean x = false; x |= true; return x;")); assertEquals(false, exec("boolean x = false; x |= false; return x;")); - assertEquals(true, exec("Boolean x = true; x |= true; return x;")); - assertEquals(true, exec("Boolean x = true; x |= false; return x;")); - assertEquals(true, exec("Boolean x = false; x |= true; return x;")); - assertEquals(false, exec("Boolean x = false; x |= false; return x;")); + assertEquals(true, exec("def x = true; x |= true; return x;")); + assertEquals(true, exec("def x = true; x |= false; return x;")); + assertEquals(true, exec("def x = false; x |= true; return x;")); + assertEquals(false, exec("def x = false; x |= false; return x;")); assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] |= true; return x[0];")); assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] |= false; return x[0];")); assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] |= true; return x[0];")); assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] |= false; return x[0];")); - assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] |= true; return x[0];")); - assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] |= false; return x[0];")); - assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] |= true; return x[0];")); - assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] |= false; return x[0];")); + assertEquals(true, exec("def[] x = new def[1]; x[0] = true; x[0] |= true; return x[0];")); + assertEquals(true, exec("def[] x = new def[1]; x[0] = true; x[0] |= false; return x[0];")); + assertEquals(true, exec("def[] x = new def[1]; x[0] = false; x[0] |= true; return x[0];")); + assertEquals(false, exec("def[] x = new def[1]; x[0] = false; x[0] |= false; return x[0];")); // byte assertEquals((byte) (13 | 14), exec("byte x = 13; x |= 14; return x;")); @@ -292,18 +292,18 @@ public class CompoundAssignmentTests extends ScriptTestCase { assertEquals(true, exec("boolean x = true; x ^= false; return x;")); assertEquals(true, exec("boolean x = false; x ^= true; return x;")); assertEquals(false, exec("boolean x = false; x ^= false; return x;")); - assertEquals(false, exec("Boolean x = true; x 
^= true; return x;")); - assertEquals(true, exec("Boolean x = true; x ^= false; return x;")); - assertEquals(true, exec("Boolean x = false; x ^= true; return x;")); - assertEquals(false, exec("Boolean x = false; x ^= false; return x;")); + assertEquals(false, exec("def x = true; x ^= true; return x;")); + assertEquals(true, exec("def x = true; x ^= false; return x;")); + assertEquals(true, exec("def x = false; x ^= true; return x;")); + assertEquals(false, exec("def x = false; x ^= false; return x;")); assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] ^= true; return x[0];")); assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] ^= false; return x[0];")); assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] ^= true; return x[0];")); assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] ^= false; return x[0];")); - assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] ^= true; return x[0];")); - assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = true; x[0] ^= false; return x[0];")); - assertEquals(true, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] ^= true; return x[0];")); - assertEquals(false, exec("Boolean[] x = new Boolean[1]; x[0] = false; x[0] ^= false; return x[0];")); + assertEquals(false, exec("def[] x = new def[1]; x[0] = true; x[0] ^= true; return x[0];")); + assertEquals(true, exec("def[] x = new def[1]; x[0] = true; x[0] ^= false; return x[0];")); + assertEquals(true, exec("def[] x = new def[1]; x[0] = false; x[0] ^= true; return x[0];")); + assertEquals(false, exec("def[] x = new def[1]; x[0] = false; x[0] ^= false; return x[0];")); // byte assertEquals((byte) (13 ^ 14), exec("byte x = 13; x ^= 14; return x;")); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ConditionalTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ConditionalTests.java index 859825f129a..a3a09fee425 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ConditionalTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ConditionalTests.java @@ -65,10 +65,8 @@ public class ConditionalTests extends ScriptTestCase { public void testPromotion() { assertEquals(false, exec("boolean x = false; boolean y = true; return (x ? 2 : 4.0F) == (y ? 2 : 4.0F);")); - assertEquals(false, exec("boolean x = false; boolean y = true; return (x ? 2 : 4.0F) == (y ? new Long(2) : new Float(4.0F));")); assertEquals(false, exec("boolean x = false; boolean y = true; " + - "return (x ? new HashMap() : new ArrayList()) == (y ? new Long(2) : new Float(4.0F));")); - assertEquals(false, exec("boolean x = false; boolean y = true; return (x ? 2 : 4.0F) == (y ? new HashMap() : new ArrayList());")); + "return (x ? new HashMap() : new ArrayList()) == (y ? 
new HashMap() : new ArrayList());")); } public void testIncompatibleAssignment() { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefOperationTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefOperationTests.java index bfc0d142045..9f171a96889 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefOperationTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefOperationTests.java @@ -22,10 +22,10 @@ package org.elasticsearch.painless; public class DefOperationTests extends ScriptTestCase { public void testIllegalCast() { Exception exception = expectThrows(ClassCastException.class, () -> exec("def x = 1.0; int y = x; return y;")); - assertTrue(exception.getMessage().contains("java.lang.Double cannot be cast to java.lang.Integer")); + assertTrue(exception.getMessage().contains("cannot be cast")); exception = expectThrows(ClassCastException.class, () -> exec("def x = (short)1; byte y = x; return y;")); - assertTrue(exception.getMessage().contains("java.lang.Short cannot be cast to java.lang.Byte")); + assertTrue(exception.getMessage().contains("cannot be cast")); } public void testNot() { @@ -103,13 +103,13 @@ public class DefOperationTests extends ScriptTestCase { assertEquals(4D, exec("def x = (float)2; def y = (double)2; return x * y")); assertEquals(4D, exec("def x = (double)2; def y = (double)2; return x * y")); - assertEquals(4, exec("def x = (Byte)2; def y = (byte)2; return x * y")); - assertEquals(4, exec("def x = (Short)2; def y = (short)2; return x * y")); - assertEquals(4, exec("def x = (Character)2; def y = (char)2; return x * y")); - assertEquals(4, exec("def x = (Integer)2; def y = (int)2; return x * y")); - assertEquals(4L, exec("def x = (Long)2; def y = (long)2; return x * y")); - assertEquals(4F, exec("def x = (Float)2; def y = (float)2; return x * y")); - assertEquals(4D, exec("def x = (Double)2; def y = (double)2; return x * y")); + assertEquals(4, exec("def x = (byte)2; def y = (byte)2; return x * y")); + assertEquals(4, exec("def x = (short)2; def y = (short)2; return x * y")); + assertEquals(4, exec("def x = (char)2; def y = (char)2; return x * y")); + assertEquals(4, exec("def x = (int)2; def y = (int)2; return x * y")); + assertEquals(4L, exec("def x = (long)2; def y = (long)2; return x * y")); + assertEquals(4F, exec("def x = (float)2; def y = (float)2; return x * y")); + assertEquals(4D, exec("def x = (double)2; def y = (double)2; return x * y")); } public void testDiv() { @@ -169,13 +169,13 @@ public class DefOperationTests extends ScriptTestCase { assertEquals(1D, exec("def x = (float)2; def y = (double)2; return x / y")); assertEquals(1D, exec("def x = (double)2; def y = (double)2; return x / y")); - assertEquals(1, exec("def x = (Byte)2; def y = (byte)2; return x / y")); - assertEquals(1, exec("def x = (Short)2; def y = (short)2; return x / y")); - assertEquals(1, exec("def x = (Character)2; def y = (char)2; return x / y")); - assertEquals(1, exec("def x = (Integer)2; def y = (int)2; return x / y")); - assertEquals(1L, exec("def x = (Long)2; def y = (long)2; return x / y")); - assertEquals(1F, exec("def x = (Float)2; def y = (float)2; return x / y")); - assertEquals(1D, exec("def x = (Double)2; def y = (double)2; return x / y")); + assertEquals(1, exec("def x = (byte)2; def y = (byte)2; return x / y")); + assertEquals(1, exec("def x = (short)2; def y = (short)2; return x / y")); + assertEquals(1, exec("def x = (char)2; def y = (char)2; return x / y")); + 
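One pattern behind the cleanup in these hunks: with both operands flowing through `def`, wrapper casts such as `(Byte)2` were redundant, because the runtime boxes primitives itself (see `testInternalBoxing` added to `BasicAPITests` above) and applies ordinary Java binary numeric promotion when dispatching an operator. A minimal sketch of that promotion for `*`; the names are illustrative only, the real dispatch lives in the Painless runtime (`Def`), not in this shape:

```java
// Sketch of binary numeric promotion for two boxed def operands.
public final class DefMulDemo {
    private static double toDouble(Object o) { return o instanceof Character ? (Character) o : ((Number) o).doubleValue(); }
    private static float toFloat(Object o) { return o instanceof Character ? (Character) o : ((Number) o).floatValue(); }
    private static long toLong(Object o) { return o instanceof Character ? (Character) o : ((Number) o).longValue(); }
    private static int toInt(Object o) { return o instanceof Character ? (Character) o : ((Number) o).intValue(); }

    static Object mul(Object a, Object b) {
        if (a instanceof Double || b instanceof Double) return toDouble(a) * toDouble(b);
        if (a instanceof Float || b instanceof Float) return toFloat(a) * toFloat(b);
        if (a instanceof Long || b instanceof Long) return toLong(a) * toLong(b);
        return toInt(a) * toInt(b); // byte, short, char and int all promote to int
    }

    public static void main(String[] args) {
        System.out.println(mul((byte) 2, (byte) 2)); // 4, an Integer, as the tests assert
        System.out.println(mul((long) 2, 2));        // 4, a Long
    }
}
```

The result boxes line up with the assertions: int-sized operands yield an `Integer`, anything involving a `long` yields a `Long`, and so on through `Float` and `Double`.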
assertEquals(1, exec("def x = (int)2; def y = (int)2; return x / y")); + assertEquals(1L, exec("def x = (long)2; def y = (long)2; return x / y")); + assertEquals(1F, exec("def x = (float)2; def y = (float)2; return x / y")); + assertEquals(1D, exec("def x = (double)2; def y = (double)2; return x / y")); } public void testRem() { @@ -235,13 +235,13 @@ public class DefOperationTests extends ScriptTestCase { assertEquals(0D, exec("def x = (float)2; def y = (double)2; return x % y")); assertEquals(0D, exec("def x = (double)2; def y = (double)2; return x % y")); - assertEquals(0, exec("def x = (Byte)2; def y = (byte)2; return x % y")); - assertEquals(0, exec("def x = (Short)2; def y = (short)2; return x % y")); - assertEquals(0, exec("def x = (Character)2; def y = (char)2; return x % y")); - assertEquals(0, exec("def x = (Integer)2; def y = (int)2; return x % y")); - assertEquals(0L, exec("def x = (Long)2; def y = (long)2; return x % y")); - assertEquals(0F, exec("def x = (Float)2; def y = (float)2; return x % y")); - assertEquals(0D, exec("def x = (Double)2; def y = (double)2; return x % y")); + assertEquals(0, exec("def x = (byte)2; def y = (byte)2; return x % y")); + assertEquals(0, exec("def x = (short)2; def y = (short)2; return x % y")); + assertEquals(0, exec("def x = (char)2; def y = (char)2; return x % y")); + assertEquals(0, exec("def x = (int)2; def y = (int)2; return x % y")); + assertEquals(0L, exec("def x = (long)2; def y = (long)2; return x % y")); + assertEquals(0F, exec("def x = (float)2; def y = (float)2; return x % y")); + assertEquals(0D, exec("def x = (double)2; def y = (double)2; return x % y")); } public void testAdd() { @@ -301,13 +301,13 @@ public class DefOperationTests extends ScriptTestCase { assertEquals(2D, exec("def x = (float)1; def y = (double)1; return x + y")); assertEquals(2D, exec("def x = (double)1; def y = (double)1; return x + y")); - assertEquals(2, exec("def x = (Byte)1; def y = (byte)1; return x + y")); - assertEquals(2, exec("def x = (Short)1; def y = (short)1; return x + y")); - assertEquals(2, exec("def x = (Character)1; def y = (char)1; return x + y")); - assertEquals(2, exec("def x = (Integer)1; def y = (int)1; return x + y")); - assertEquals(2L, exec("def x = (Long)1; def y = (long)1; return x + y")); - assertEquals(2F, exec("def x = (Float)1; def y = (float)1; return x + y")); - assertEquals(2D, exec("def x = (Double)1; def y = (double)1; return x + y")); + assertEquals(2, exec("def x = (byte)1; def y = (byte)1; return x + y")); + assertEquals(2, exec("def x = (short)1; def y = (short)1; return x + y")); + assertEquals(2, exec("def x = (char)1; def y = (char)1; return x + y")); + assertEquals(2, exec("def x = (int)1; def y = (int)1; return x + y")); + assertEquals(2L, exec("def x = (long)1; def y = (long)1; return x + y")); + assertEquals(2F, exec("def x = (float)1; def y = (float)1; return x + y")); + assertEquals(2D, exec("def x = (double)1; def y = (double)1; return x + y")); } public void testSub() { @@ -367,13 +367,13 @@ public class DefOperationTests extends ScriptTestCase { assertEquals(0D, exec("def x = (float)1; def y = (double)1; return x - y")); assertEquals(0D, exec("def x = (double)1; def y = (double)1; return x - y")); - assertEquals(0, exec("def x = (Byte)1; def y = (byte)1; return x - y")); - assertEquals(0, exec("def x = (Short)1; def y = (short)1; return x - y")); - assertEquals(0, exec("def x = (Character)1; def y = (char)1; return x - y")); - assertEquals(0, exec("def x = (Integer)1; def y = (int)1; return x - y")); - 
assertEquals(0L, exec("def x = (Long)1; def y = (long)1; return x - y")); - assertEquals(0F, exec("def x = (Float)1; def y = (float)1; return x - y")); - assertEquals(0D, exec("def x = (Double)1; def y = (double)1; return x - y")); + assertEquals(0, exec("def x = (byte)1; def y = (byte)1; return x - y")); + assertEquals(0, exec("def x = (short)1; def y = (short)1; return x - y")); + assertEquals(0, exec("def x = (char)1; def y = (char)1; return x - y")); + assertEquals(0, exec("def x = (int)1; def y = (int)1; return x - y")); + assertEquals(0L, exec("def x = (long)1; def y = (long)1; return x - y")); + assertEquals(0F, exec("def x = (float)1; def y = (float)1; return x - y")); + assertEquals(0D, exec("def x = (double)1; def y = (double)1; return x - y")); } public void testLsh() { @@ -433,13 +433,13 @@ public class DefOperationTests extends ScriptTestCase { assertEquals(2L, exec("def x = (float)1; def y = (double)1; return x << y")); assertEquals(2L, exec("def x = (double)1; def y = (double)1; return x << y")); - assertEquals(2, exec("def x = (Byte)1; def y = (byte)1; return x << y")); - assertEquals(2, exec("def x = (Short)1; def y = (short)1; return x << y")); - assertEquals(2, exec("def x = (Character)1; def y = (char)1; return x << y")); - assertEquals(2, exec("def x = (Integer)1; def y = (int)1; return x << y")); - assertEquals(2L, exec("def x = (Long)1; def y = (long)1; return x << y")); - assertEquals(2L, exec("def x = (Float)1; def y = (float)1; return x << y")); - assertEquals(2L, exec("def x = (Double)1; def y = (double)1; return x << y")); + assertEquals(2, exec("def x = (byte)1; def y = (byte)1; return x << y")); + assertEquals(2, exec("def x = (short)1; def y = (short)1; return x << y")); + assertEquals(2, exec("def x = (char)1; def y = (char)1; return x << y")); + assertEquals(2, exec("def x = (int)1; def y = (int)1; return x << y")); + assertEquals(2L, exec("def x = (long)1; def y = (long)1; return x << y")); + assertEquals(2L, exec("def x = (float)1; def y = (float)1; return x << y")); + assertEquals(2L, exec("def x = (double)1; def y = (double)1; return x << y")); } public void testRsh() { @@ -499,13 +499,13 @@ public class DefOperationTests extends ScriptTestCase { assertEquals(2L, exec("def x = (float)4; def y = (double)1; return x >> y")); assertEquals(2L, exec("def x = (double)4; def y = (double)1; return x >> y")); - assertEquals(2, exec("def x = (Byte)4; def y = (byte)1; return x >> y")); - assertEquals(2, exec("def x = (Short)4; def y = (short)1; return x >> y")); - assertEquals(2, exec("def x = (Character)4; def y = (char)1; return x >> y")); - assertEquals(2, exec("def x = (Integer)4; def y = (int)1; return x >> y")); - assertEquals(2L, exec("def x = (Long)4; def y = (long)1; return x >> y")); - assertEquals(2L, exec("def x = (Float)4; def y = (float)1; return x >> y")); - assertEquals(2L, exec("def x = (Double)4; def y = (double)1; return x >> y")); + assertEquals(2, exec("def x = (byte)4; def y = (byte)1; return x >> y")); + assertEquals(2, exec("def x = (short)4; def y = (short)1; return x >> y")); + assertEquals(2, exec("def x = (char)4; def y = (char)1; return x >> y")); + assertEquals(2, exec("def x = (int)4; def y = (int)1; return x >> y")); + assertEquals(2L, exec("def x = (long)4; def y = (long)1; return x >> y")); + assertEquals(2L, exec("def x = (float)4; def y = (float)1; return x >> y")); + assertEquals(2L, exec("def x = (double)4; def y = (double)1; return x >> y")); } public void testUsh() { @@ -565,13 +565,13 @@ public class DefOperationTests 
extends ScriptTestCase { assertEquals(2L, exec("def x = (float)4; def y = (double)1; return x >>> y")); assertEquals(2L, exec("def x = (double)4; def y = (double)1; return x >>> y")); - assertEquals(2, exec("def x = (Byte)4; def y = (byte)1; return x >>> y")); - assertEquals(2, exec("def x = (Short)4; def y = (short)1; return x >>> y")); - assertEquals(2, exec("def x = (Character)4; def y = (char)1; return x >>> y")); - assertEquals(2, exec("def x = (Integer)4; def y = (int)1; return x >>> y")); - assertEquals(2L, exec("def x = (Long)4; def y = (long)1; return x >>> y")); - assertEquals(2L, exec("def x = (Float)4; def y = (float)1; return x >>> y")); - assertEquals(2L, exec("def x = (Double)4; def y = (double)1; return x >>> y")); + assertEquals(2, exec("def x = (byte)4; def y = (byte)1; return x >>> y")); + assertEquals(2, exec("def x = (short)4; def y = (short)1; return x >>> y")); + assertEquals(2, exec("def x = (char)4; def y = (char)1; return x >>> y")); + assertEquals(2, exec("def x = (int)4; def y = (int)1; return x >>> y")); + assertEquals(2L, exec("def x = (long)4; def y = (long)1; return x >>> y")); + assertEquals(2L, exec("def x = (float)4; def y = (float)1; return x >>> y")); + assertEquals(2L, exec("def x = (double)4; def y = (double)1; return x >>> y")); } public void testAnd() { @@ -631,13 +631,13 @@ public class DefOperationTests extends ScriptTestCase { assertEquals(0L, exec("def x = (float)4; def y = (double)1; return x & y")); assertEquals(0L, exec("def x = (double)4; def y = (double)1; return x & y")); - assertEquals(0, exec("def x = (Byte)4; def y = (byte)1; return x & y")); - assertEquals(0, exec("def x = (Short)4; def y = (short)1; return x & y")); - assertEquals(0, exec("def x = (Character)4; def y = (char)1; return x & y")); - assertEquals(0, exec("def x = (Integer)4; def y = (int)1; return x & y")); - assertEquals(0L, exec("def x = (Long)4; def y = (long)1; return x & y")); - assertEquals(0L, exec("def x = (Float)4; def y = (float)1; return x & y")); - assertEquals(0L, exec("def x = (Double)4; def y = (double)1; return x & y")); + assertEquals(0, exec("def x = (byte)4; def y = (byte)1; return x & y")); + assertEquals(0, exec("def x = (short)4; def y = (short)1; return x & y")); + assertEquals(0, exec("def x = (char)4; def y = (char)1; return x & y")); + assertEquals(0, exec("def x = (int)4; def y = (int)1; return x & y")); + assertEquals(0L, exec("def x = (long)4; def y = (long)1; return x & y")); + assertEquals(0L, exec("def x = (float)4; def y = (float)1; return x & y")); + assertEquals(0L, exec("def x = (double)4; def y = (double)1; return x & y")); } public void testXor() { @@ -697,13 +697,13 @@ public class DefOperationTests extends ScriptTestCase { assertEquals(5L, exec("def x = (float)4; def y = (double)1; return x ^ y")); assertEquals(5L, exec("def x = (double)4; def y = (double)1; return x ^ y")); - assertEquals(5, exec("def x = (Byte)4; def y = (byte)1; return x ^ y")); - assertEquals(5, exec("def x = (Short)4; def y = (short)1; return x ^ y")); - assertEquals(5, exec("def x = (Character)4; def y = (char)1; return x ^ y")); - assertEquals(5, exec("def x = (Integer)4; def y = (int)1; return x ^ y")); - assertEquals(5L, exec("def x = (Long)4; def y = (long)1; return x ^ y")); - assertEquals(5L, exec("def x = (Float)4; def y = (float)1; return x ^ y")); - assertEquals(5L, exec("def x = (Double)4; def y = (double)1; return x ^ y")); + assertEquals(5, exec("def x = (byte)4; def y = (byte)1; return x ^ y")); + assertEquals(5, exec("def x = (short)4; def y = 
(short)1; return x ^ y")); + assertEquals(5, exec("def x = (char)4; def y = (char)1; return x ^ y")); + assertEquals(5, exec("def x = (int)4; def y = (int)1; return x ^ y")); + assertEquals(5L, exec("def x = (long)4; def y = (long)1; return x ^ y")); + assertEquals(5L, exec("def x = (float)4; def y = (float)1; return x ^ y")); + assertEquals(5L, exec("def x = (double)4; def y = (double)1; return x ^ y")); } public void testOr() { @@ -763,13 +763,13 @@ public class DefOperationTests extends ScriptTestCase { assertEquals(5L, exec("def x = (float)4; def y = (double)1; return x | y")); assertEquals(5L, exec("def x = (double)4; def y = (double)1; return x | y")); - assertEquals(5, exec("def x = (Byte)4; def y = (byte)1; return x | y")); - assertEquals(5, exec("def x = (Short)4; def y = (short)1; return x | y")); - assertEquals(5, exec("def x = (Character)4; def y = (char)1; return x | y")); - assertEquals(5, exec("def x = (Integer)4; def y = (int)1; return x | y")); - assertEquals(5L, exec("def x = (Long)4; def y = (long)1; return x | y")); - assertEquals(5L, exec("def x = (Float)4; def y = (float)1; return x | y")); - assertEquals(5L, exec("def x = (Double)4; def y = (double)1; return x | y")); + assertEquals(5, exec("def x = (byte)4; def y = (byte)1; return x | y")); + assertEquals(5, exec("def x = (short)4; def y = (short)1; return x | y")); + assertEquals(5, exec("def x = (char)4; def y = (char)1; return x | y")); + assertEquals(5, exec("def x = (int)4; def y = (int)1; return x | y")); + assertEquals(5L, exec("def x = (long)4; def y = (long)1; return x | y")); + assertEquals(5L, exec("def x = (float)4; def y = (float)1; return x | y")); + assertEquals(5L, exec("def x = (double)4; def y = (double)1; return x | y")); } public void testEq() { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java index 8043d9da915..7e4448495a9 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java @@ -94,17 +94,8 @@ public class EqualsTests extends ScriptTestCase { } public void testEquals() { - assertEquals(true, exec("return new Long(3) == new Long(3);")); - assertEquals(false, exec("return new Long(3) === new Long(3);")); - assertEquals(true, exec("Integer x = new Integer(3); Object y = x; return x == y;")); - assertEquals(true, exec("Integer x = new Integer(3); Object y = x; return x === y;")); - assertEquals(true, exec("Integer x = new Integer(3); Object y = new Integer(3); return x == y;")); - assertEquals(false, exec("Integer x = new Integer(3); Object y = new Integer(3); return x === y;")); - assertEquals(true, exec("Integer x = new Integer(3); int y = 3; return x == y;")); - assertEquals(true, exec("Integer x = new Integer(3); short y = 3; return x == y;")); - assertEquals(true, exec("Integer x = new Integer(3); Short y = (short)3; return x == y;")); - assertEquals(false, exec("Integer x = new Integer(3); int y = 3; return x === y;")); - assertEquals(false, exec("Integer x = new Integer(3); double y = 3; return x === y;")); + assertEquals(true, exec("return 3 == 3;")); + assertEquals(false, exec("int x = 4; int y = 5; x == y")); assertEquals(true, exec("int[] x = new int[1]; Object y = x; return x == y;")); assertEquals(true, exec("int[] x = new int[1]; Object y = x; return x === y;")); assertEquals(false, exec("int[] x = new int[1]; Object y = new int[1]; 
return x == y;")); @@ -114,14 +105,8 @@ public class EqualsTests extends ScriptTestCase { } public void testNotEquals() { - assertEquals(false, exec("return new Long(3) != new Long(3);")); - assertEquals(true, exec("return new Long(3) !== new Long(3);")); - assertEquals(false, exec("Integer x = new Integer(3); Object y = x; return x != y;")); - assertEquals(false, exec("Integer x = new Integer(3); Object y = x; return x !== y;")); - assertEquals(false, exec("Integer x = new Integer(3); Object y = new Integer(3); return x != y;")); - assertEquals(true, exec("Integer x = new Integer(3); Object y = new Integer(3); return x !== y;")); - assertEquals(true, exec("Integer x = new Integer(3); int y = 3; return x !== y;")); - assertEquals(true, exec("Integer x = new Integer(3); double y = 3; return x !== y;")); + assertEquals(false, exec("return 3 != 3;")); + assertEquals(true, exec("int x = 4; int y = 5; x != y")); assertEquals(false, exec("int[] x = new int[1]; Object y = x; return x != y;")); assertEquals(false, exec("int[] x = new int[1]; Object y = x; return x !== y;")); assertEquals(true, exec("int[] x = new int[1]; Object y = new int[1]; return x != y;")); @@ -131,54 +116,36 @@ public class EqualsTests extends ScriptTestCase { } public void testBranchEquals() { - assertEquals(0, exec("Character a = (char)'a'; Character b = (char)'b'; if (a == b) return 1; else return 0;")); - assertEquals(1, exec("Character a = (char)'a'; Character b = (char)'a'; if (a == b) return 1; else return 0;")); - assertEquals(0, exec("Integer a = new Integer(1); Integer b = 1; if (a === b) return 1; else return 0;")); - assertEquals(0, exec("Character a = (char)'a'; Character b = new Character((char)'a'); if (a === b) return 1; else return 0;")); - assertEquals(1, exec("Character a = (char)'a'; Object b = a; if (a === b) return 1; else return 0;")); - assertEquals(1, exec("Integer a = 1; Number b = a; Number c = a; if (c === b) return 1; else return 0;")); - assertEquals(0, exec("Integer a = 1; Character b = (char)'a'; if (a === (Object)b) return 1; else return 0;")); + assertEquals(0, exec("def a = (char)'a'; def b = (char)'b'; if (a == b) return 1; else return 0;")); + assertEquals(1, exec("def a = (char)'a'; def b = (char)'a'; if (a == b) return 1; else return 0;")); + assertEquals(1, exec("def a = 1; def b = 1; if (a === b) return 1; else return 0;")); + assertEquals(1, exec("def a = (char)'a'; def b = (char)'a'; if (a === b) return 1; else return 0;")); + assertEquals(1, exec("def a = (char)'a'; Object b = a; if (a === b) return 1; else return 0;")); + assertEquals(1, exec("def a = 1; Number b = a; Number c = a; if (c === b) return 1; else return 0;")); + assertEquals(0, exec("def a = 1; Object b = new HashMap(); if (a === (Object)b) return 1; else return 0;")); } public void testBranchNotEquals() { - assertEquals(1, exec("Character a = (char)'a'; Character b = (char)'b'; if (a != b) return 1; else return 0;")); - assertEquals(0, exec("Character a = (char)'a'; Character b = (char)'a'; if (a != b) return 1; else return 0;")); - assertEquals(1, exec("Integer a = new Integer(1); Integer b = 1; if (a !== b) return 1; else return 0;")); - assertEquals(1, exec("Character a = (char)'a'; Character b = new Character((char)'a'); if (a !== b) return 1; else return 0;")); - assertEquals(0, exec("Character a = (char)'a'; Object b = a; if (a !== b) return 1; else return 0;")); - assertEquals(0, exec("Integer a = 1; Number b = a; Number c = a; if (c !== b) return 1; else return 0;")); - assertEquals(1, exec("Integer a = 1; 
Character b = (char)'a'; if (a !== (Object)b) return 1; else return 0;")); + assertEquals(1, exec("def a = (char)'a'; def b = (char)'b'; if (a != b) return 1; else return 0;")); + assertEquals(0, exec("def a = (char)'a'; def b = (char)'a'; if (a != b) return 1; else return 0;")); + assertEquals(0, exec("def a = 1; def b = 1; if (a !== b) return 1; else return 0;")); + assertEquals(0, exec("def a = (char)'a'; def b = (char)'a'; if (a !== b) return 1; else return 0;")); + assertEquals(0, exec("def a = (char)'a'; Object b = a; if (a !== b) return 1; else return 0;")); + assertEquals(0, exec("def a = 1; Number b = a; Number c = a; if (c !== b) return 1; else return 0;")); + assertEquals(1, exec("def a = 1; Object b = new HashMap(); if (a !== (Object)b) return 1; else return 0;")); } public void testRightHandNull() { - assertEquals(false, exec("Character a = (char)'a'; return a == null;")); - assertEquals(false, exec("Character a = (char)'a'; return a === null;")); - assertEquals(true, exec("Character a = (char)'a'; return a != null;")); - assertEquals(true, exec("Character a = (char)'a'; return a !== null;")); - assertEquals(true, exec("Character a = null; return a == null;")); - assertEquals(false, exec("Character a = null; return a != null;")); - assertEquals(false, exec("Character a = (char)'a'; Character b = null; return a == b;")); - assertEquals(true, exec("Character a = null; Character b = null; return a === b;")); - assertEquals(true, exec("Character a = (char)'a'; Character b = null; return a != b;")); - assertEquals(false, exec("Character a = null; Character b = null; return a !== b;")); - assertEquals(false, exec("Integer x = null; double y = 2.0; return x == y;")); - assertEquals(true, exec("Integer x = null; Short y = null; return x == y;")); + assertEquals(false, exec("HashMap a = new HashMap(); return a == null;")); + assertEquals(false, exec("HashMap a = new HashMap(); return a === null;")); + assertEquals(true, exec("HashMap a = new HashMap(); return a != null;")); + assertEquals(true, exec("HashMap a = new HashMap(); return a !== null;")); } public void testLeftHandNull() { - assertEquals(false, exec("Character a = (char)'a'; return null == a;")); - assertEquals(false, exec("Character a = (char)'a'; return null === a;")); - assertEquals(true, exec("Character a = (char)'a'; return null != a;")); - assertEquals(true, exec("Character a = (char)'a'; return null !== a;")); - assertEquals(true, exec("Character a = null; return null == a;")); - assertEquals(false, exec("Character a = null; return null != a;")); - assertEquals(false, exec("Character a = null; Character b = (char)'a'; return a == b;")); - assertEquals(true, exec("Character a = null; Character b = null; return a == b;")); - assertEquals(true, exec("Character a = null; Character b = null; return b === a;")); - assertEquals(true, exec("Character a = null; Character b = (char)'a'; return a != b;")); - assertEquals(false, exec("Character a = null; Character b = null; return b != a;")); - assertEquals(false, exec("Character a = null; Character b = null; return b !== a;")); - assertEquals(false, exec("Integer x = null; double y = 2.0; return y == x;")); - assertEquals(true, exec("Integer x = null; Short y = null; return y == x;")); + assertEquals(false, exec("HashMap a = new HashMap(); return null == a;")); + assertEquals(false, exec("HashMap a = new HashMap(); return null === a;")); + assertEquals(true, exec("HashMap a = new HashMap(); return null != a;")); + assertEquals(true, exec("HashMap a = new HashMap(); return null 
!== a;")); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowDisabledTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowDisabledTests.java deleted file mode 100644 index 7bec0b110df..00000000000 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowDisabledTests.java +++ /dev/null @@ -1,293 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.painless; - -import java.util.Collections; -import java.util.Map; - -/** Tests floating point overflow with numeric overflow disabled */ -public class FloatOverflowDisabledTests extends ScriptTestCase { - - /** wire overflow to false for all tests */ - @Override - public Object exec(String script, Map vars) { - return exec(script, vars, Collections.singletonMap(CompilerSettings.NUMERIC_OVERFLOW, "false")); - } - - public void testAssignmentAdditionOverflow() { - // float - try { - exec("float x = 3.4028234663852886E38f; x += 3.4028234663852886E38f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("float x = -3.4028234663852886E38f; x += -3.4028234663852886E38f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - - // double - try { - exec("double x = 1.7976931348623157E308; x += 1.7976931348623157E308; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = -1.7976931348623157E308; x += -1.7976931348623157E308; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testAssignmentSubtractionOverflow() { - // float - try { - exec("float x = 3.4028234663852886E38f; x -= -3.4028234663852886E38f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("float x = -3.4028234663852886E38f; x -= 3.4028234663852886E38f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - - // double - try { - exec("double x = 1.7976931348623157E308; x -= -1.7976931348623157E308; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = -1.7976931348623157E308; x -= 1.7976931348623157E308; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testAssignmentMultiplicationOverflow() { - // float - try { - exec("float x = 3.4028234663852886E38f; x *= 3.4028234663852886E38f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("float x = 3.4028234663852886E38f; x *= 
-3.4028234663852886E38f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - - // double - try { - exec("double x = 1.7976931348623157E308; x *= 1.7976931348623157E308; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = 1.7976931348623157E308; x *= -1.7976931348623157E308; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testAssignmentDivisionOverflow() { - // float - try { - exec("float x = 3.4028234663852886E38f; x /= 1.401298464324817E-45f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("float x = 3.4028234663852886E38f; x /= -1.401298464324817E-45f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("float x = 1.0f; x /= 0.0f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - - // double - try { - exec("double x = 1.7976931348623157E308; x /= 4.9E-324; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = 1.7976931348623157E308; x /= -4.9E-324; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = 1.0f; x /= 0.0; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testAddition() throws Exception { - try { - exec("float x = 3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x + y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = 1.7976931348623157E308; double y = 1.7976931348623157E308; return x + y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testAdditionConst() throws Exception { - try { - exec("return 3.4028234663852886E38f + 3.4028234663852886E38f;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("return 1.7976931348623157E308 + 1.7976931348623157E308;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testSubtraction() throws Exception { - try { - exec("float x = -3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x - y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = -1.7976931348623157E308; double y = 1.7976931348623157E308; return x - y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testSubtractionConst() throws Exception { - try { - exec("return -3.4028234663852886E38f - 3.4028234663852886E38f;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("return -1.7976931348623157E308 - 1.7976931348623157E308;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testMultiplication() throws Exception { - try { - exec("float x = 3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x * y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = 1.7976931348623157E308; double y = 1.7976931348623157E308; return x * y;"); - fail("didn't hit expected exception"); 
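The deleted tests here rely on the old `try { exec(...); fail(...); } catch (Expected e) {}` idiom. Elsewhere in this same patch the suite already uses `expectThrows` (see `DefOperationTests.testIllegalCast` above), which is terser and hands back the exception for further assertions; had these tests survived, each block could collapse to a single call, sketched here as a fragment of a `ScriptTestCase` subclass:

```java
// Equivalent of one deleted try/fail/catch block, using the expectThrows
// helper already exercised in DefOperationTests earlier in this patch.
public void testAssignmentMultiplicationOverflow() {
    expectThrows(ArithmeticException.class, () ->
            exec("float x = 3.4028234663852886E38f; x *= 3.4028234663852886E38f; return x;"));
}
```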
- } catch (ArithmeticException expected) {} - } - - public void testMultiplicationConst() throws Exception { - try { - exec("return 3.4028234663852886E38f * 3.4028234663852886E38f;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("return 1.7976931348623157E308 * 1.7976931348623157E308;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testDivision() throws Exception { - try { - exec("float x = 3.4028234663852886E38f; float y = 1.401298464324817E-45f; return x / y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("float x = 1.0f; float y = 0.0f; return x / y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = 1.7976931348623157E308; double y = 4.9E-324; return x / y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = 1.0; double y = 0.0; return x / y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testDivisionConst() throws Exception { - try { - exec("return 3.4028234663852886E38f / 1.401298464324817E-45f;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("return 1.0f / 0.0f;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("return 1.7976931348623157E308 / 4.9E-324;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("return 1.0 / 0.0;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testDivisionNaN() throws Exception { - // float division, constant division, and assignment - try { - exec("float x = 0f; float y = 0f; return x / y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("return 0f / 0f;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("float x = 0f; x /= 0f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - - // double division, constant division, and assignment - try { - exec("double x = 0.0; double y = 0.0; return x / y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("return 0.0 / 0.0;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("double x = 0.0; x /= 0.0; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testRemainderNaN() throws Exception { - // float division, constant division, and assignment - try { - exec("float x = 1f; float y = 0f; return x % y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("return 1f % 0f;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("float x = 1f; x %= 0f; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - - // double division, constant division, and assignment - try { - exec("double x = 1.0; double y = 0.0; return x % y;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - try { - exec("return 1.0 % 0.0;"); - fail("didn't hit expected exception"); - } catch 
(ArithmeticException expected) {} - try { - exec("double x = 1.0; x %= 0.0; return x;"); - fail("didn't hit expected exception"); - } catch (ArithmeticException expected) {} - } -} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowEnabledTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowTests.java similarity index 94% rename from modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowEnabledTests.java rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowTests.java index ccfd2232e88..4b3eb8f0e7f 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowEnabledTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FloatOverflowTests.java @@ -19,17 +19,8 @@ package org.elasticsearch.painless; -import java.util.Collections; -import java.util.Map; - -/** Tests floating point overflow with numeric overflow enabled */ -public class FloatOverflowEnabledTests extends ScriptTestCase { - - /** wire overflow to true for all tests */ - @Override - public Object exec(String script, Map vars) { - return exec(script, vars, Collections.singletonMap(CompilerSettings.NUMERIC_OVERFLOW, "true")); - } +/** Tests floating point overflow cases */ +public class FloatOverflowTests extends ScriptTestCase { public void testAssignmentAdditionOverflow() { // float diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowDisabledTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowDisabledTests.java deleted file mode 100644 index f4adcfce878..00000000000 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowDisabledTests.java +++ /dev/null @@ -1,444 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.painless; - -import java.util.Collections; -import java.util.Map; - -/** Tests integer overflow with numeric overflow disabled */ -public class IntegerOverflowDisabledTests extends ScriptTestCase { - - /** wire overflow to true for all tests */ - @Override - public Object exec(String script, Map vars) { - return exec(script, vars, Collections.singletonMap(CompilerSettings.NUMERIC_OVERFLOW, "false")); - } - - public void testAssignmentAdditionOverflow() { - // byte - try { - exec("byte x = 0; x += 128; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("byte x = 0; x += -129; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // short - try { - exec("short x = 0; x += 32768; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("byte x = 0; x += -32769; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // char - try { - exec("char x = 0; x += 65536; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("char x = 0; x += -65536; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // int - try { - exec("int x = 1; x += 2147483647; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("int x = -2; x += -2147483647; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // long - try { - exec("long x = 1; x += 9223372036854775807L; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = -2; x += -9223372036854775807L; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testAssignmentSubtractionOverflow() { - // byte - try { - exec("byte x = 0; x -= -128; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("byte x = 0; x -= 129; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // short - try { - exec("short x = 0; x -= -32768; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("byte x = 0; x -= 32769; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // char - try { - exec("char x = 0; x -= -65536; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("char x = 0; x -= 65536; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // int - try { - exec("int x = 1; x -= -2147483647; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("int x = -2; x -= 2147483647; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // long - try { - exec("long x = 1; x -= -9223372036854775807L; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = -2; x -= 9223372036854775807L; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } 
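// ---- Editorial illustration (not part of the patch) ----
// The *DisabledTests deleted in this change exercised the old
// CompilerSettings.NUMERIC_OVERFLOW=false mode, in which Painless compiled arithmetic
// into checked helpers (e.g. Utility.addWithoutOverflow; its tests are deleted further
// below) that throw ArithmeticException on overflow. After this change only standard
// Java overflow semantics remain (the Enabled variants are renamed to plain
// FloatOverflowTests/IntegerOverflowTests). A minimal plain-Java sketch of the
// difference, using only java.lang.Math:
class OverflowDemo {
    public static void main(String[] args) {
        int wrapped = Integer.MAX_VALUE + 1;               // standard Java: silently wraps to Integer.MIN_VALUE
        System.out.println(wrapped);                       // prints -2147483648
        int checked = Math.addExact(Integer.MAX_VALUE, 1); // checked arithmetic: throws ArithmeticException
        System.out.println(checked);                       // never reached
    }
}
// ---- End illustration ----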
- - public void testAssignmentMultiplicationOverflow() { - // byte - try { - exec("byte x = 2; x *= 128; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("byte x = 2; x *= -128; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // char - try { - exec("char x = 2; x *= 65536; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("char x = 2; x *= -65536; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // int - try { - exec("int x = 2; x *= 2147483647; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("int x = 2; x *= -2147483647; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // long - try { - exec("long x = 2; x *= 9223372036854775807L; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = 2; x *= -9223372036854775807L; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testAssignmentDivisionOverflow() { - // byte - try { - exec("byte x = (byte) -128; x /= -1; return x;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - - // short - try { - exec("short x = (short) -32768; x /= -1; return x;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - - // cannot happen for char: unsigned - - // int - try { - exec("int x = -2147483647 - 1; x /= -1; return x;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - - // long - try { - exec("long x = -9223372036854775807L - 1L; x /=-1L; return x;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - } - - public void testIncrementOverFlow() throws Exception { - // byte - try { - exec("byte x = 127; ++x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("byte x = 127; x++; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("byte x = (byte) -128; --x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("byte x = (byte) -128; x--; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // short - try { - exec("short x = 32767; ++x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("short x = 32767; x++; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("short x = (short) -32768; --x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("short x = (short) -32768; x--; return x;"); - } catch (ArithmeticException expected) {} - - // char - try { - exec("char x = 65535; ++x; return x;"); - } catch (ArithmeticException expected) {} - - try { - exec("char x = 65535; x++; return x;"); - } catch (ArithmeticException expected) {} - - try { - exec("char x = (char) 0; --x; return x;"); - } catch (ArithmeticException expected) {} - - try { - exec("char x = (char) 0; x--; return x;"); - } catch (ArithmeticException 
expected) {} - - // int - try { - exec("int x = 2147483647; ++x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("int x = 2147483647; x++; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("int x = (int) -2147483648L; --x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("int x = (int) -2147483648L; x--; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - // long - try { - exec("long x = 9223372036854775807L; ++x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = 9223372036854775807L; x++; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = -9223372036854775807L - 1L; --x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = -9223372036854775807L - 1L; x--; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testAddition() throws Exception { - try { - exec("int x = 2147483647; int y = 2147483647; return x + y;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x + y;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - } - - public void testAdditionConst() throws Exception { - try { - exec("return 2147483647 + 2147483647;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - - try { - exec("return 9223372036854775807L + 9223372036854775807L;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - } - - - public void testSubtraction() throws Exception { - try { - exec("int x = -10; int y = 2147483647; return x - y;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = -10L; long y = 9223372036854775807L; return x - y;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - } - - public void testSubtractionConst() throws Exception { - try { - exec("return -10 - 2147483647;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - - try { - exec("return -10L - 9223372036854775807L;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - } - - public void testMultiplication() throws Exception { - try { - exec("int x = 2147483647; int y = 2147483647; return x * y;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x * y;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - } - - public void testMultiplicationConst() throws Exception { - try { - exec("return 2147483647 * 2147483647;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - - try { - exec("return 9223372036854775807L * 9223372036854775807L;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - } - - public void testDivision() throws Exception { - try { - exec("int x = -2147483647 - 1; int y = -1; return x / y;"); - fail("should have hit 
exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = -9223372036854775808L; long y = -1L; return x / y;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - } - - public void testDivisionConst() throws Exception { - try { - exec("return (-2147483648) / -1;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - - try { - exec("return (-9223372036854775808L) / -1L;"); - fail("should have hit exception"); - } catch (ArithmeticException expected) {} - } - - public void testNegationOverflow() throws Exception { - try { - exec("int x = -2147483648; x = -x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = -9223372036854775808L; x = -x; return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testNegationOverflowConst() throws Exception { - try { - exec("int x = -(-2147483648); return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - exec("long x = -(-9223372036854775808L); return x;"); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } -} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowEnabledTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowTests.java similarity index 95% rename from modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowEnabledTests.java rename to modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowTests.java index 41b3f857c0a..1165547bf5a 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowEnabledTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/IntegerOverflowTests.java @@ -19,17 +19,8 @@ package org.elasticsearch.painless; -import java.util.Collections; -import java.util.Map; - -/** Tests integer overflow with numeric overflow enabled */ -public class IntegerOverflowEnabledTests extends ScriptTestCase { - - /** wire overflow to true for all tests */ - @Override - public Object exec(String script, Map vars) { - return exec(script, vars, Collections.singletonMap(CompilerSettings.NUMERIC_OVERFLOW, "true")); - } +/** Tests integer overflow cases */ +public class IntegerOverflowTests extends ScriptTestCase { public void testAssignmentAdditionOverflow() { // byte diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java index 4b56d9751b6..3fe071c5221 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java @@ -60,6 +60,7 @@ public class NeedsScoreTests extends ESSingleNodeTestCase { ss = service.search(new CompiledScript(ScriptType.INLINE, "randomName", "painless", compiled), lookup, Collections.emptyMap()); assertTrue(ss.needsScores()); + service.close(); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/NoSemiColonTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/NoSemiColonTests.java index f2e65fc680c..c57a756f109 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/NoSemiColonTests.java +++ 
b/modules/lang-painless/src/test/java/org/elasticsearch/painless/NoSemiColonTests.java @@ -35,7 +35,7 @@ public class NoSemiColonTests extends ScriptTestCase { assertEquals(2.0, exec("double a = 2; return a")); assertEquals(false, exec("boolean a = false; return a")); assertEquals("string", exec("String a = \"string\"; return a")); - assertEquals(HashMap.class, exec("Map<String,Object> a = new HashMap<String,Object>(); return a").getClass()); + assertEquals(HashMap.class, exec("Map a = new HashMap(); return a").getClass()); assertEquals(byte[].class, exec("byte[] a = new byte[1]; return a").getClass()); assertEquals(short[].class, exec("short[] a = new short[1]; return a").getClass()); @@ -46,7 +46,7 @@ public class NoSemiColonTests extends ScriptTestCase { assertEquals(double[].class, exec("double[] a = new double[1]; return a").getClass()); assertEquals(boolean[].class, exec("boolean[] a = new boolean[1]; return a").getClass()); assertEquals(String[].class, exec("String[] a = new String[1]; return a").getClass()); - assertEquals(Map[].class, exec("Map<String,Object>[] a = new Map<String,Object>[1]; return a").getClass()); + assertEquals(Map[].class, exec("Map[] a = new Map[1]; return a").getClass()); assertEquals(byte[][].class, exec("byte[][] a = new byte[1][2]; return a").getClass()); assertEquals(short[][][].class, exec("short[][][] a = new short[1][2][3]; return a").getClass()); @@ -57,9 +57,9 @@ public class NoSemiColonTests extends ScriptTestCase { assertEquals(double[][][][].class, exec("double[][][][] a = new double[1][2][3][4]; return a").getClass()); assertEquals(boolean[][][][][].class, exec("boolean[][][][][] a = new boolean[1][2][3][4][5]; return a").getClass()); assertEquals(String[][].class, exec("String[][] a = new String[1][2]; return a").getClass()); - assertEquals(Map[][][].class, exec("Map<String,Object>[][][] a = new Map<String,Object>[1][2][3]; return a").getClass()); + assertEquals(Map[][][].class, exec("Map[][][] a = new Map[1][2][3]; return a").getClass()); } - + public void testExpression() { assertEquals(10, exec("10")); assertEquals(10, exec("5 + 5")); @@ -73,6 +73,6 @@ public class NoSemiColonTests extends ScriptTestCase { assertEquals(5, exec("int x = 5; return x")); assertEquals(4, exec("int[] x = new int[2]; x[1] = 4; return x[1]")); assertEquals(5, ((short[])exec("short[] s = new short[3]; s[1] = 5; return s"))[1]); - assertEquals(10, ((Map)exec("Map<String,Object> s = new HashMap< String,Object>(); s.put(\"x\", 10); return s")).get("x")); + assertEquals(10, ((Map)exec("Map s = new HashMap(); s.put(\"x\", 10); return s")).get("x")); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java index 2ccd2f1460a..d95fa3897da 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java @@ -26,6 +26,7 @@ import org.elasticsearch.test.ESTestCase; import org.junit.Before; import java.util.Collections; +import java.util.HashMap; import java.util.Map; /** @@ -48,7 +49,9 @@ public abstract class ScriptTestCase extends ESTestCase { /** Compiles and returns the result of {@code script} with access to {@code vars} */ public Object exec(String script, Map<String, Object> vars) { - return exec(script, vars, Collections.singletonMap(CompilerSettings.NUMERIC_OVERFLOW, Boolean.toString(random().nextBoolean()))); + Map<String, String> compilerSettings = new HashMap<>(); + compilerSettings.put(CompilerSettings.PICKY, "true"); + return exec(script, vars, 
compilerSettings); } /** Compiles and returns the result of {@code script} with access to {@code vars} and compile-time parameters */ diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java index e61541bf371..b06199cf903 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java @@ -70,13 +70,13 @@ public class StringTests extends ScriptTestCase { public void testAppendMultiple() { assertEquals("cat" + true + "abc" + null, exec("String s = \"cat\"; return s + true + 'abc' + null;")); } - + public void testAppendMany() { for (int i = MAX_INDY_STRING_CONCAT_ARGS - 5; i < MAX_INDY_STRING_CONCAT_ARGS + 5; i++) { doTestAppendMany(i); } } - + private void doTestAppendMany(int count) { StringBuilder script = new StringBuilder("String s = \"cat\"; return s"); StringBuilder result = new StringBuilder("cat"); @@ -90,11 +90,11 @@ public class StringTests extends ScriptTestCase { Debugger.toString(s).contains(String.format(Locale.ROOT, "LDC \"%03d\"", count/2))); assertEquals(result.toString(), exec(s)); } - + public void testNestedConcats() { assertEquals("foo1010foo", exec("String s = 'foo'; String x = '10'; return s + Integer.parseInt(x + x) + s;")); } - + public void testStringAPI() { assertEquals("", exec("return new String();")); assertEquals('x', exec("String s = \"x\"; return s.charAt(0);")); @@ -166,14 +166,14 @@ public class StringTests extends ScriptTestCase { assertEquals("cc", exec("return (String)(char)\"cc\"")); fail(); } catch (final ClassCastException cce) { - assertTrue(cce.getMessage().contains("Cannot cast from [String] to [char].")); + assertTrue(cce.getMessage().contains("Cannot cast [String] with length greater than one to [char].")); } try { assertEquals("cc", exec("return (String)(char)'cc'")); fail(); } catch (final ClassCastException cce) { - assertTrue(cce.getMessage().contains("Cannot cast from [String] to [char].")); + assertTrue(cce.getMessage().contains("Cannot cast [String] with length greater than one to [char].")); } try { @@ -189,41 +189,5 @@ public class StringTests extends ScriptTestCase { } catch (final ClassCastException cce) { assertTrue(cce.getMessage().contains("Cannot cast [String] with length greater than one to [char].")); } - - assertEquals('c', exec("return (Character)\"c\"")); - assertEquals('c', exec("return (Character)'c'")); - assertEquals("c", exec("return (String)(Character)\"c\"")); - assertEquals("c", exec("return (String)(Character)'c'")); - - assertEquals('c', exec("String s = \"c\"; (Character)s")); - assertEquals('c', exec("String s = 'c'; (Character)s")); - - try { - assertEquals("cc", exec("return (String)(Character)\"cc\"")); - fail(); - } catch (final ClassCastException ise) { - assertTrue(ise.getMessage().contains("Cannot cast [String] with length greater than one to [Character].")); - } - - try { - assertEquals("cc", exec("return (String)(Character)'cc'")); - fail(); - } catch (final ClassCastException ise) { - assertTrue(ise.getMessage().contains("Cannot cast [String] with length greater than one to [Character].")); - } - - try { - assertEquals('c', exec("String s = \"cc\"; (Character)s")); - fail(); - } catch (final ClassCastException cce) { - assertTrue(cce.getMessage().contains("Cannot cast [String] with length greater than one to [Character].")); - } - - try { - assertEquals('c', exec("String s = 
'cc'; (Character)s")); - fail(); - } catch (final ClassCastException cce) { - assertTrue(cce.getMessage().contains("Cannot cast [String] with length greater than one to [Character].")); - } } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/TryCatchTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/TryCatchTests.java new file mode 100644 index 00000000000..07439f7f42b --- /dev/null +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/TryCatchTests.java @@ -0,0 +1,58 @@ +package org.elasticsearch.painless; + +import java.util.Collections; + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** tests for throw/try/catch in painless */ +public class TryCatchTests extends ScriptTestCase { + + /** throws an exception */ + public void testThrow() { + RuntimeException exception = expectThrows(RuntimeException.class, () -> { + exec("throw new RuntimeException('test')"); + }); + assertEquals("test", exception.getMessage()); + } + + /** catches the exact exception */ + public void testCatch() { + assertEquals(1, exec("try { if (params.param == 'true') throw new RuntimeException('test'); } " + + "catch (RuntimeException e) { return 1; } return 2;", + Collections.singletonMap("param", "true"))); + } + + /** catches superclass of the exception */ + public void testCatchSuperclass() { + assertEquals(1, exec("try { if (params.param == 'true') throw new RuntimeException('test'); } " + + "catch (Exception e) { return 1; } return 2;", + Collections.singletonMap("param", "true"))); + } + + /** tries to catch a different type of exception */ + public void testNoCatch() { + RuntimeException exception = expectThrows(RuntimeException.class, () -> { + exec("try { if (params.param == 'true') throw new RuntimeException('test'); } " + + "catch (ArithmeticException e) { return 1; } return 2;", + Collections.singletonMap("param", "true")); + }); + assertEquals("test", exception.getMessage()); + } +} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/UtilityTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/UtilityTests.java deleted file mode 100644 index ba476fac7f2..00000000000 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/UtilityTests.java +++ /dev/null @@ -1,250 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.painless; - -import org.elasticsearch.test.ESTestCase; - -/** - * Tests utility methods (typically built-ins) - */ -public class UtilityTests extends ESTestCase { - - public void testDivideWithoutOverflowInt() { - assertEquals(5 / 2, Utility.divideWithoutOverflow(5, 2)); - - try { - Utility.divideWithoutOverflow(Integer.MIN_VALUE, -1); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.divideWithoutOverflow(5, 0); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testDivideWithoutOverflowLong() { - assertEquals(5L / 2L, Utility.divideWithoutOverflow(5L, 2L)); - - try { - Utility.divideWithoutOverflow(Long.MIN_VALUE, -1L); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.divideWithoutOverflow(5L, 0L); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testToByteExact() { - for (int b = Byte.MIN_VALUE; b < Byte.MAX_VALUE; b++) { - assertEquals((byte)b, Utility.toByteExact(b)); - } - - try { - Utility.toByteExact(Byte.MIN_VALUE - 1); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.toByteExact(Byte.MAX_VALUE + 1); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testToShortExact() { - for (int s = Short.MIN_VALUE; s < Short.MAX_VALUE; s++) { - assertEquals((short)s, Utility.toShortExact(s)); - } - - try { - Utility.toShortExact(Short.MIN_VALUE - 1); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.toShortExact(Short.MAX_VALUE + 1); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testToCharExact() { - for (int c = Character.MIN_VALUE; c < Character.MAX_VALUE; c++) { - assertEquals((char)c, Utility.toCharExact(c)); - } - - try { - Utility.toCharExact(Character.MIN_VALUE - 1); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.toCharExact(Character.MAX_VALUE + 1); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testAddWithoutOverflowFloat() { - assertEquals(10F, Utility.addWithoutOverflow(5F, 5F), 0F); - assertTrue(Float.isNaN(Utility.addWithoutOverflow(5F, Float.NaN))); - assertTrue(Float.isNaN(Utility.addWithoutOverflow(Float.POSITIVE_INFINITY, Float.NEGATIVE_INFINITY))); - - try { - Utility.addWithoutOverflow(Float.MAX_VALUE, Float.MAX_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.addWithoutOverflow(-Float.MAX_VALUE, -Float.MAX_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testAddWithoutOverflowDouble() { - assertEquals(10D, Utility.addWithoutOverflow(5D, 5D), 0D); - assertTrue(Double.isNaN(Utility.addWithoutOverflow(5D, Double.NaN))); - 
assertTrue(Double.isNaN(Utility.addWithoutOverflow(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY))); - - try { - Utility.addWithoutOverflow(Double.MAX_VALUE, Double.MAX_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.addWithoutOverflow(-Double.MAX_VALUE, -Double.MAX_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testSubtractWithoutOverflowFloat() { - assertEquals(5F, Utility.subtractWithoutOverflow(10F, 5F), 0F); - assertTrue(Float.isNaN(Utility.subtractWithoutOverflow(5F, Float.NaN))); - assertTrue(Float.isNaN(Utility.subtractWithoutOverflow(Float.POSITIVE_INFINITY, Float.POSITIVE_INFINITY))); - - try { - Utility.subtractWithoutOverflow(Float.MAX_VALUE, -Float.MAX_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.subtractWithoutOverflow(-Float.MAX_VALUE, Float.MAX_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testSubtractWithoutOverflowDouble() { - assertEquals(5D, Utility.subtractWithoutOverflow(10D, 5D), 0D); - assertTrue(Double.isNaN(Utility.subtractWithoutOverflow(5D, Double.NaN))); - assertTrue(Double.isNaN(Utility.subtractWithoutOverflow(Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY))); - - try { - Utility.subtractWithoutOverflow(Double.MAX_VALUE, -Double.MAX_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.subtractWithoutOverflow(-Double.MAX_VALUE, Double.MAX_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testMultiplyWithoutOverflowFloat() { - assertEquals(25F, Utility.multiplyWithoutOverflow(5F, 5F), 0F); - assertTrue(Float.isNaN(Utility.multiplyWithoutOverflow(5F, Float.NaN))); - assertEquals(Float.POSITIVE_INFINITY, Utility.multiplyWithoutOverflow(5F, Float.POSITIVE_INFINITY), 0F); - - try { - Utility.multiplyWithoutOverflow(Float.MAX_VALUE, Float.MAX_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testMultiplyWithoutOverflowDouble() { - assertEquals(25D, Utility.multiplyWithoutOverflow(5D, 5D), 0D); - assertTrue(Double.isNaN(Utility.multiplyWithoutOverflow(5D, Double.NaN))); - assertEquals(Double.POSITIVE_INFINITY, Utility.multiplyWithoutOverflow(5D, Double.POSITIVE_INFINITY), 0D); - - try { - Utility.multiplyWithoutOverflow(Double.MAX_VALUE, Double.MAX_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testDivideWithoutOverflowFloat() { - assertEquals(5F, Utility.divideWithoutOverflow(25F, 5F), 0F); - assertTrue(Float.isNaN(Utility.divideWithoutOverflow(5F, Float.NaN))); - assertEquals(Float.POSITIVE_INFINITY, Utility.divideWithoutOverflow(Float.POSITIVE_INFINITY, 5F), 0F); - - try { - Utility.divideWithoutOverflow(Float.MAX_VALUE, Float.MIN_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.divideWithoutOverflow(0F, 0F); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.divideWithoutOverflow(5F, 0F); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testDivideWithoutOverflowDouble() { - assertEquals(5D, Utility.divideWithoutOverflow(25D, 5D), 0D); - 
assertTrue(Double.isNaN(Utility.divideWithoutOverflow(5D, Double.NaN))); - assertEquals(Double.POSITIVE_INFINITY, Utility.divideWithoutOverflow(Double.POSITIVE_INFINITY, 5D), 0D); - - try { - Utility.divideWithoutOverflow(Double.MAX_VALUE, Double.MIN_VALUE); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.divideWithoutOverflow(0D, 0D); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - - try { - Utility.divideWithoutOverflow(5D, 0D); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testRemainderWithoutOverflowFloat() { - assertEquals(1F, Utility.remainderWithoutOverflow(25F, 4F), 0F); - - try { - Utility.remainderWithoutOverflow(5F, 0F); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } - - public void testRemainderWithoutOverflowDouble() { - assertEquals(1D, Utility.remainderWithoutOverflow(25D, 4D), 0D); - - try { - Utility.remainderWithoutOverflow(5D, 0D); - fail("did not get expected exception"); - } catch (ArithmeticException expected) {} - } -} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java index 15ae321534e..9cdce7583f5 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java @@ -52,13 +52,13 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { "return y.isEmpty();"); }); assertEquals(3, exception.getStackTrace()[0].getLineNumber()); - + // trigger NPE at line 4 in script (inside conditional) exception = expectThrows(NullPointerException.class, () -> { exec("String x = null;\n" + "boolean y = false;\n" + "if (!y) {\n" + - " y = x.isEmpty();\n" + + " y = x.isEmpty();\n" + "}\n" + "return y;"); }); @@ -133,7 +133,7 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { exec("try { int x; } catch (PainlessError error) {}"); fail("should have hit ParseException"); }); - assertTrue(parseException.getMessage().contains("Not a type [PainlessError].")); + assertTrue(parseException.getMessage().contains("unexpected token ['PainlessError']")); } public void testLoopLimits() { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/antlr/ParserTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/antlr/ParserTests.java new file mode 100644 index 00000000000..aedecfc9c91 --- /dev/null +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/antlr/ParserTests.java @@ -0,0 +1,81 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.antlr; + +import org.antlr.v4.runtime.ANTLRInputStream; +import org.antlr.v4.runtime.BaseErrorListener; +import org.antlr.v4.runtime.CommonTokenStream; +import org.antlr.v4.runtime.DiagnosticErrorListener; +import org.antlr.v4.runtime.RecognitionException; +import org.antlr.v4.runtime.Recognizer; +import org.antlr.v4.runtime.atn.PredictionMode; +import org.elasticsearch.painless.antlr.PainlessParser.SourceContext; +import org.elasticsearch.painless.ScriptTestCase; + +import java.text.ParseException; + +public class ParserTests extends ScriptTestCase { + private static class TestException extends RuntimeException { + TestException(String msg) { + super(msg); + } + } + + private SourceContext buildAntlrTree(String source) { + ANTLRInputStream stream = new ANTLRInputStream(source); + PainlessLexer lexer = new ErrorHandlingLexer(stream); + PainlessParser parser = new PainlessParser(new CommonTokenStream(lexer)); + ParserErrorStrategy strategy = new ParserErrorStrategy(); + + lexer.removeErrorListeners(); + parser.removeErrorListeners(); + + // The diagnostic listener invokes syntaxError on the other listeners for ambiguity issues, + parser.addErrorListener(new DiagnosticErrorListener(true)); + // and this second listener fails the test when that happens. + parser.addErrorListener(new BaseErrorListener() { + @Override + public void syntaxError(final Recognizer<?,?> recognizer, final Object offendingSymbol, final int line, + final int charPositionInLine, final String msg, final RecognitionException e) { + throw new TestException("line: " + line + ", offset: " + charPositionInLine + + ", symbol:" + offendingSymbol + " " + msg); + } + }); + + // Enable exact ambiguity detection (costly). We enable exact since it's the default for + // DiagnosticErrorListener; life is too short to think about what 'inexact ambiguity' might mean. + parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION); + parser.setErrorHandler(strategy); + + return parser.source(); + } + + public void testIllegalSecondary() { + // TODO: Need way more corner case tests. + Exception exception = expectThrows(TestException.class, () -> buildAntlrTree("(x = 5).y")); + assertTrue(exception.getMessage().contains("no viable alternative")); + exception = expectThrows(TestException.class, () -> buildAntlrTree("((x = 5).y = 2).z;")); + assertTrue(exception.getMessage().contains("no viable alternative")); + exception = expectThrows(TestException.class, () -> buildAntlrTree("(2 + 2).z")); + assertTrue(exception.getMessage().contains("no viable alternative")); + exception = expectThrows(RuntimeException.class, () -> buildAntlrTree("((Map)x.-x)")); + assertTrue(exception.getMessage().contains("unexpected character")); + } +} diff --git a/modules/percolator/build.gradle b/modules/percolator/build.gradle new file mode 100644 index 00000000000..4e55fe25521 --- /dev/null +++ b/modules/percolator/build.gradle @@ -0,0 +1,26 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +esplugin { + description 'Percolator module adds the capability to index queries and to query these queries by specifying documents' + classname 'org.elasticsearch.percolator.PercolatorPlugin' +} + +compileJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes" +compileTestJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes" \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/percolator/ExtractQueryTermsService.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/ExtractQueryTermsService.java similarity index 90% rename from core/src/main/java/org/elasticsearch/index/percolator/ExtractQueryTermsService.java rename to modules/percolator/src/main/java/org/elasticsearch/percolator/ExtractQueryTermsService.java index 2f48a0c6439..f0c61b74e13 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/ExtractQueryTermsService.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/ExtractQueryTermsService.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.index.percolator; +package org.elasticsearch.percolator; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; @@ -37,19 +37,16 @@ import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.apache.lucene.search.spans.SpanContainingQuery; import org.apache.lucene.search.spans.SpanFirstQuery; -import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; import org.apache.lucene.search.spans.SpanNearQuery; import org.apache.lucene.search.spans.SpanNotQuery; import org.apache.lucene.search.spans.SpanOrQuery; import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.search.spans.SpanTermQuery; -import org.apache.lucene.search.spans.SpanWithinQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.elasticsearch.common.logging.LoggerMessageFormat; +import org.elasticsearch.common.lucene.search.MatchNoDocsQuery; import org.elasticsearch.index.mapper.ParseContext; import java.io.IOException; @@ -72,14 +69,15 @@ public final class ExtractQueryTermsService { /** * Extracts all terms from the specified query and adds it to the specified document. - * @param query The query to extract terms from - * @param document The document to add the extracted terms to - * @param queryTermsFieldField The field in the document holding the extracted terms - * @param unknownQueryField The field used to mark a document that not all query terms could be extracted. For example - * the query contained an unsupported query (e.g. WildcardQuery). 
+ * @param query The query to extract terms from + * @param document The document to add the extracted terms to + * @param queryTermsFieldField The field in the document holding the extracted terms + * @param unknownQueryField The field used to mark a document for which not all query terms could be extracted. + * For example, the query contained an unsupported query (e.g. WildcardQuery). * @param fieldType The field type for the query metadata field */ - public static void extractQueryTerms(Query query, ParseContext.Document document, String queryTermsFieldField, String unknownQueryField, FieldType fieldType) { + public static void extractQueryTerms(Query query, ParseContext.Document document, String queryTermsFieldField, + String unknownQueryField, FieldType fieldType) { Set<Term> queryTerms; try { queryTerms = extractQueryTerms(query); @@ -106,7 +104,10 @@ public final class ExtractQueryTermsService { * an UnsupportedQueryException is thrown. */ static Set<Term> extractQueryTerms(Query query) { - if (query instanceof TermQuery) { + if (query instanceof MatchNoDocsQuery) { + // no terms to extract as this query matches no docs + return Collections.emptySet(); + } else if (query instanceof TermQuery) { return Collections.singleton(((TermQuery) query).getTerm()); } else if (query instanceof TermsQuery) { Set<Term> terms = new HashSet<>(); @@ -145,7 +146,8 @@ public final class ExtractQueryTermsService { for (BooleanClause clause : clauses) { if (clause.isRequired() == false) { // skip must_not clauses, we don't need to remember the things that do *not* match... - // skip should clauses, this bq has must clauses, so we don't need to remember should clauses, since they are completely optional. + // skip should clauses, this bq has must clauses, so we don't need to remember should clauses, + // since they are completely optional. continue; } @@ -234,7 +236,8 @@ public final class ExtractQueryTermsService { /** * Creates a boolean query with a should clause for each term on all fields of the specified index reader. */ - public static Query createQueryTermsQuery(IndexReader indexReader, String queryMetadataField, String unknownQueryField) throws IOException { + public static Query createQueryTermsQuery(IndexReader indexReader, String queryMetadataField, + String unknownQueryField) throws IOException { Objects.requireNonNull(queryMetadataField); Objects.requireNonNull(unknownQueryField); diff --git a/core/src/main/java/org/elasticsearch/action/percolate/MultiPercolateAction.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateAction.java similarity index 97% rename from core/src/main/java/org/elasticsearch/action/percolate/MultiPercolateAction.java rename to modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateAction.java index 2b9538ae054..f80e1c45494 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/MultiPercolateAction.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateAction.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.action.percolate; +package org.elasticsearch.percolator; import org.elasticsearch.action.Action; import org.elasticsearch.client.ElasticsearchClient; diff --git a/core/src/main/java/org/elasticsearch/action/percolate/MultiPercolateRequest.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateRequest.java similarity index 99% rename from core/src/main/java/org/elasticsearch/action/percolate/MultiPercolateRequest.java rename to modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateRequest.java index ac49fed763a..a18c12bb769 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/MultiPercolateRequest.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateRequest.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.action.percolate; +package org.elasticsearch.percolator; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ActionRequest; @@ -44,7 +44,10 @@ import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeSt /** * A multi percolate request that encapsulates multiple {@link PercolateRequest} instances in a single api call. + * + * @deprecated Instead use multi search API with {@link PercolateQueryBuilder} */ +@Deprecated public class MultiPercolateRequest extends ActionRequest implements CompositeIndicesRequest { private String[] indices; diff --git a/core/src/main/java/org/elasticsearch/action/percolate/MultiPercolateRequestBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateRequestBuilder.java similarity index 94% rename from core/src/main/java/org/elasticsearch/action/percolate/MultiPercolateRequestBuilder.java rename to modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateRequestBuilder.java index df36f9ff7f4..8613b8b07bd 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/MultiPercolateRequestBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateRequestBuilder.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.action.percolate; +package org.elasticsearch.percolator; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.support.IndicesOptions; @@ -24,7 +24,10 @@ import org.elasticsearch.client.ElasticsearchClient; /** * A builder for to ease the use of defining a {@link MultiPercolateRequest} instance. 
+ * + * @deprecated Instead use multi search API with {@link PercolateQueryBuilder} */ +@Deprecated public class MultiPercolateRequestBuilder extends ActionRequestBuilder { public MultiPercolateRequestBuilder(ElasticsearchClient client, MultiPercolateAction action) { diff --git a/core/src/main/java/org/elasticsearch/action/percolate/MultiPercolateResponse.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateResponse.java similarity index 97% rename from core/src/main/java/org/elasticsearch/action/percolate/MultiPercolateResponse.java rename to modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateResponse.java index 3c626678e74..c05c0097c90 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/MultiPercolateResponse.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateResponse.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.action.percolate; +package org.elasticsearch.percolator; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionResponse; @@ -36,7 +36,10 @@ import java.util.Iterator; * * Each item represents the response of a percolator request and the order of the items is in the same order as the * percolator requests were defined in the multi percolate request. + * + * @deprecated Instead use multi search API with {@link PercolateQueryBuilder} */ +@Deprecated public class MultiPercolateResponse extends ActionResponse implements Iterable, ToXContent { private Item[] items; diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateAction.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateAction.java similarity index 97% rename from core/src/main/java/org/elasticsearch/action/percolate/PercolateAction.java rename to modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateAction.java index 412f4e3ed62..94f5a0f04df 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateAction.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateAction.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.action.percolate; +package org.elasticsearch.percolator; import org.elasticsearch.action.Action; import org.elasticsearch.client.ElasticsearchClient; diff --git a/core/src/main/java/org/elasticsearch/index/query/PercolateQuery.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java similarity index 87% rename from core/src/main/java/org/elasticsearch/index/query/PercolateQuery.java rename to modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java index e99687422db..2f3108d6298 100644 --- a/core/src/main/java/org/elasticsearch/index/query/PercolateQuery.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.index.query; +package org.elasticsearch.percolator; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; @@ -34,7 +34,6 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.util.Accountable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.Lucene; -import org.elasticsearch.index.percolator.ExtractQueryTermsService; import java.io.IOException; import java.util.Objects; @@ -50,7 +49,7 @@ public final class PercolateQuery extends Query implements Accountable { public static class Builder { private final String docType; - private final QueryRegistry queryRegistry; + private final QueryStore queryStore; private final BytesReference documentSource; private final IndexSearcher percolatorIndexSearcher; @@ -59,15 +58,15 @@ public final class PercolateQuery extends Query implements Accountable { /** * @param docType The type of the document being percolated - * @param queryRegistry The registry holding all the percolator queries as Lucene queries. + * @param queryStore The lookup holding all the percolator queries as Lucene queries. * @param documentSource The source of the document being percolated * @param percolatorIndexSearcher The index searcher on top of the in-memory index that holds the document being percolated */ - public Builder(String docType, QueryRegistry queryRegistry, BytesReference documentSource, IndexSearcher percolatorIndexSearcher) { + public Builder(String docType, QueryStore queryStore, BytesReference documentSource, IndexSearcher percolatorIndexSearcher) { this.docType = Objects.requireNonNull(docType); this.documentSource = Objects.requireNonNull(documentSource); this.percolatorIndexSearcher = Objects.requireNonNull(percolatorIndexSearcher); - this.queryRegistry = Objects.requireNonNull(queryRegistry); + this.queryStore = Objects.requireNonNull(queryStore); } /** @@ -94,7 +93,6 @@ public final class PercolateQuery extends Query implements Accountable { if (percolateTypeQuery != null && queriesMetaDataQuery != null) { throw new IllegalStateException("Either filter by deprecated percolator type or by query metadata"); } - // The query that selects which percolator queries will be evaluated by MemoryIndex: BooleanQuery.Builder builder = new BooleanQuery.Builder(); if (percolateTypeQuery != null) { @@ -103,24 +101,23 @@ public final class PercolateQuery extends Query implements Accountable { if (queriesMetaDataQuery != null) { builder.add(queriesMetaDataQuery, FILTER); } - - return new PercolateQuery(docType, queryRegistry, documentSource, builder.build(), percolatorIndexSearcher); + return new PercolateQuery(docType, queryStore, documentSource, builder.build(), percolatorIndexSearcher); } } private final String documentType; - private final QueryRegistry queryRegistry; + private final QueryStore queryStore; private final BytesReference documentSource; private final Query percolatorQueriesQuery; private final IndexSearcher percolatorIndexSearcher; - private PercolateQuery(String documentType, QueryRegistry queryRegistry, BytesReference documentSource, + private PercolateQuery(String documentType, QueryStore queryStore, BytesReference documentSource, Query percolatorQueriesQuery, IndexSearcher percolatorIndexSearcher) { this.documentType = documentType; this.documentSource = documentSource; this.percolatorQueriesQuery = percolatorQueriesQuery; - this.queryRegistry = queryRegistry; + this.queryStore = queryStore; this.percolatorIndexSearcher = 
percolatorIndexSearcher; } @@ -128,7 +125,7 @@ public final class PercolateQuery extends Query implements Accountable { public Query rewrite(IndexReader reader) throws IOException { Query rewritten = percolatorQueriesQuery.rewrite(reader); if (rewritten != percolatorQueriesQuery) { - return new PercolateQuery(documentType, queryRegistry, documentSource, rewritten, percolatorIndexSearcher); + return new PercolateQuery(documentType, queryStore, documentSource, rewritten, percolatorIndexSearcher); } else { return this; } @@ -151,7 +148,7 @@ public final class PercolateQuery extends Query implements Accountable { if (result == docId) { if (twoPhaseIterator.matches()) { if (needsScores) { - QueryRegistry.Leaf percolatorQueries = queryRegistry.getQueries(leafReaderContext); + QueryStore.Leaf percolatorQueries = queryStore.getQueries(leafReaderContext); Query query = percolatorQueries.getQuery(docId); Explanation detail = percolatorIndexSearcher.explain(query, 0); return Explanation.match(scorer.score(), "PercolateQuery", detail); @@ -181,9 +178,9 @@ public final class PercolateQuery extends Query implements Accountable { return null; } - final QueryRegistry.Leaf percolatorQueries = queryRegistry.getQueries(leafReaderContext); + final QueryStore.Leaf queries = queryStore.getQueries(leafReaderContext); if (needsScores) { - return new BaseScorer(this, approximation, percolatorQueries, percolatorIndexSearcher) { + return new BaseScorer(this, approximation, queries, percolatorIndexSearcher) { float score; @@ -209,7 +206,7 @@ public final class PercolateQuery extends Query implements Accountable { } }; } else { - return new BaseScorer(this, approximation, percolatorQueries, percolatorIndexSearcher) { + return new BaseScorer(this, approximation, queries, percolatorIndexSearcher) { @Override public float score() throws IOException { @@ -238,6 +235,10 @@ public final class PercolateQuery extends Query implements Accountable { return documentSource; } + public QueryStore getQueryStore() { + return queryStore; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -276,13 +277,15 @@ public final class PercolateQuery extends Query implements Accountable { return sizeInBytes; } - public interface QueryRegistry { + @FunctionalInterface + public interface QueryStore { - Leaf getQueries(LeafReaderContext ctx); + Leaf getQueries(LeafReaderContext ctx) throws IOException; + @FunctionalInterface interface Leaf { - Query getQuery(int docId); + Query getQuery(int docId) throws IOException; } @@ -291,10 +294,10 @@ public final class PercolateQuery extends Query implements Accountable { static abstract class BaseScorer extends Scorer { final Scorer approximation; - final QueryRegistry.Leaf percolatorQueries; + final QueryStore.Leaf percolatorQueries; final IndexSearcher percolatorIndexSearcher; - BaseScorer(Weight weight, Scorer approximation, QueryRegistry.Leaf percolatorQueries, IndexSearcher percolatorIndexSearcher) { + BaseScorer(Weight weight, Scorer approximation, QueryStore.Leaf percolatorQueries, IndexSearcher percolatorIndexSearcher) { super(weight); this.approximation = approximation; this.percolatorQueries = percolatorQueries; diff --git a/core/src/main/java/org/elasticsearch/index/query/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java similarity index 77% rename from core/src/main/java/org/elasticsearch/index/query/PercolateQueryBuilder.java rename to 
modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index 8341459b155..627cb09da19 100644 --- a/core/src/main/java/org/elasticsearch/index/query/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -17,14 +17,17 @@ * under the License. */ -package org.elasticsearch.index.query; +package org.elasticsearch.percolator; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.MultiReader; import org.apache.lucene.index.SlowCompositeReaderWrapper; +import org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.index.Term; import org.apache.lucene.index.memory.MemoryIndex; import org.apache.lucene.search.BooleanClause; @@ -33,23 +36,27 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.Weight; +import org.apache.lucene.util.Bits; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.Version; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.FieldNameAnalyzer; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperForType; @@ -57,15 +64,21 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.internal.SourceFieldMapper; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; -import org.elasticsearch.index.percolator.PercolatorFieldMapper; -import org.elasticsearch.index.percolator.PercolatorQueryCache; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.QueryShardException; import java.io.IOException; import java.util.List; import java.util.Objects; import static org.elasticsearch.index.mapper.SourceToParse.source; +import static 
org.elasticsearch.percolator.PercolatorFieldMapper.parseQuery; public class PercolateQueryBuilder extends AbstractQueryBuilder { @@ -146,7 +159,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder { + LeafReader leafReader = ctx.reader(); + BinaryDocValues binaryDocValues = leafReader.getBinaryDocValues(fieldType.getQueryBuilderFieldName()); + if (binaryDocValues == null) { + return docId -> null; + } + + Bits bits = leafReader.getDocsWithField(fieldType.getQueryBuilderFieldName()); + return docId -> { + if (bits.get(docId)) { + BytesRef qbSource = binaryDocValues.get(docId); + if (qbSource.length > 0) { + XContent xContent = PercolatorFieldMapper.QUERY_BUILDER_CONTENT_TYPE.xContent(); + try (XContentParser sourceParser = xContent.createParser(qbSource.bytes, qbSource.offset, qbSource.length)) { + return parseQuery(context, mapUnmappedFieldsAsString, sourceParser); + } + } else { + return null; + } + } else { + return null; + } + }; + }; + } + + private static PercolateQuery.QueryStore createLegacyStore(QueryShardContext context, boolean mapUnmappedFieldsAsString) { + return ctx -> { + LeafReader leafReader = ctx.reader(); + return docId -> { + LegacyQueryFieldVisitor visitor = new LegacyQueryFieldVisitor(); + leafReader.document(docId, visitor); + if (visitor.source == null) { + throw new IllegalStateException("No source found for document with docid [" + docId + "]"); + } + + try (XContentParser sourceParser = XContentHelper.createParser(visitor.source)) { + String currentFieldName = null; + XContentParser.Token token = sourceParser.nextToken(); // move the START_OBJECT + if (token != XContentParser.Token.START_OBJECT) { + throw new ElasticsearchException("failed to parse query [" + docId + "], not starting with OBJECT"); + } + while ((token = sourceParser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = sourceParser.currentName(); + } else if (token == XContentParser.Token.START_OBJECT) { + if ("query".equals(currentFieldName)) { + return parseQuery(context, mapUnmappedFieldsAsString, sourceParser); + } else { + sourceParser.skipChildren(); + } + } else if (token == XContentParser.Token.START_ARRAY) { + sourceParser.skipChildren(); + } + } + } + return null; + }; + }; + } + + private final static class LegacyQueryFieldVisitor extends StoredFieldVisitor { + + private BytesArray source; + + @Override + public void binaryField(FieldInfo fieldInfo, byte[] bytes) throws IOException { + source = new BytesArray(bytes); + } + + @Override + public Status needsField(FieldInfo fieldInfo) throws IOException { + if (source != null) { + return Status.STOP; + } + if (SourceFieldMapper.NAME.equals(fieldInfo.name)) { + return Status.YES; + } else { + return Status.NO; + } + } + + } + } diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateRequest.java similarity index 97% rename from core/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java rename to modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateRequest.java index c9887cba03f..4509ab3cb50 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequest.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateRequest.java @@ -16,16 +16,14 @@ * specific language governing permissions and limitations * under the License. 
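Because QueryStore and its nested Leaf are each single-method interfaces (now marked @FunctionalInterface), the factory methods above can build the whole store out of nested lambdas. The same shape in miniature, with a plain map standing in for the doc-values or stored-field lookup (the map is an assumption for illustration only):

// ctx -> leaf -> per-docId query lookup; mirrors the lambda nesting in the factories above.
Map<Integer, Query> queries = new HashMap<>();
PercolateQuery.QueryStore store = ctx -> docId -> queries.get(docId);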
*/ -package org.elasticsearch.action.percolate; +package org.elasticsearch.percolator; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.CompositeIndicesRequest; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.client.Requests; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -36,15 +34,16 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; import java.util.Map; import static org.elasticsearch.action.ValidateActions.addValidationError; /** * A request to execute a percolate operation. + * + * @deprecated Instead use search API with {@link PercolateQueryBuilder} */ +@Deprecated public class PercolateRequest extends ActionRequest implements IndicesRequest.Replaceable { protected String[] indices; diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateRequestBuilder.java similarity index 98% rename from core/src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java rename to modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateRequestBuilder.java index 9490abd0b68..7d42382e76e 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateRequestBuilder.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.action.percolate; +package org.elasticsearch.percolator; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.get.GetRequest; @@ -36,7 +36,10 @@ import java.util.Map; /** * A builder to ease defining a percolate request. + * + * @deprecated Instead use search API with {@link PercolateQueryBuilder} */ +@Deprecated public class PercolateRequestBuilder extends ActionRequestBuilder { private PercolateSourceBuilder sourceBuilder; diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateResponse.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateResponse.java similarity index 98% rename from core/src/main/java/org/elasticsearch/action/percolate/PercolateResponse.java rename to modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateResponse.java index 110ed37342f..622c27fca92 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateResponse.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateResponse.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.action.percolate; +package org.elasticsearch.percolator; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastResponse; @@ -41,7 +41,10 @@ import java.util.Map; /** * Encapsulates the response of a percolator request.
+ * + * @deprecated Instead use search API with {@link PercolateQueryBuilder} */ +@Deprecated public class PercolateResponse extends BroadcastResponse implements Iterable, ToXContent { public static final Match[] EMPTY = new Match[0]; diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateSourceBuilder.java similarity index 98% rename from core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java rename to modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateSourceBuilder.java index 5c69d3be50b..88e709447f7 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateSourceBuilder.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.action.percolate; +package org.elasticsearch.percolator; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.action.support.ToXContentToBytes; @@ -44,7 +44,10 @@ import java.util.Map; /** * Builder to create the percolate request body. + * + * @deprecated Instead use search API with {@link PercolateQueryBuilder} */ +@Deprecated public class PercolateSourceBuilder extends ToXContentToBytes { private DocBuilder docBuilder; diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java similarity index 81% rename from core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java rename to modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java index 59b980d59a4..bf5c4c0ae8b 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java @@ -16,18 +16,20 @@ * specific language governing permissions and limitations * under the License. 
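With the request/response classes above deprecated, registering a query now happens by indexing a document into a field mapped as type "percolator", which the mapper below implements. A sketch of the indexing side, following the pattern the integration tests in this change use; index, type, and field names are assumptions, and jsonBuilder/termQuery are the usual static imports from XContentFactory and QueryBuilders:

// Map a "query" field as type=percolator, then store a query document in it.
client.admin().indices().prepareCreate("queries")
        .addMapping("query", "query", "type=percolator")
        .get();
client.prepareIndex("queries", "query", "1")
        .setSource(jsonBuilder().startObject()
                .field("query", termQuery("field1", "value1"))
                .endObject())
        .setRefresh(true)
        .get();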
*/ -package org.elasticsearch.index.percolator; +package org.elasticsearch.percolator; import org.apache.lucene.document.Field; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.search.Query; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -50,14 +52,15 @@ import java.util.Map; public class PercolatorFieldMapper extends FieldMapper { - @Deprecated - public static final String LEGACY_TYPE_NAME = ".percolator"; + public final static XContentType QUERY_BUILDER_CONTENT_TYPE = XContentType.SMILE; + public final static Setting<Boolean> INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING = + Setting.boolSetting("index.percolator.map_unmapped_fields_as_string", false, Setting.Property.IndexScope); public static final String CONTENT_TYPE = "percolator"; private static final PercolatorFieldType FIELD_TYPE = new PercolatorFieldType(); - private static final String EXTRACTED_TERMS_FIELD_NAME = "extracted_terms"; - private static final String UNKNOWN_QUERY_FIELD_NAME = "unknown_query"; - static final String QUERY_BUILDER_FIELD_NAME = "query_builder_field"; + public static final String EXTRACTED_TERMS_FIELD_NAME = "extracted_terms"; + public static final String UNKNOWN_QUERY_FIELD_NAME = "unknown_query"; + public static final String QUERY_BUILDER_FIELD_NAME = "query_builder_field"; public static class Builder extends FieldMapper.Builder { @@ -172,23 +175,37 @@ public class PercolatorFieldMapper extends FieldMapper { this.queryTermsField = queryTermsField; this.unknownQueryField = unknownQueryField; this.queryBuilderField = queryBuilderField; - this.mapUnmappedFieldAsString = PercolatorQueryCache.INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING.get(indexSettings); + this.mapUnmappedFieldAsString = INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING.get(indexSettings); + } + + @Override + public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) { + PercolatorFieldMapper updated = (PercolatorFieldMapper) super.updateFieldType(fullNameToFieldType); + KeywordFieldMapper queryTermsUpdated = (KeywordFieldMapper) queryTermsField.updateFieldType(fullNameToFieldType); + KeywordFieldMapper unknownQueryUpdated = (KeywordFieldMapper) unknownQueryField.updateFieldType(fullNameToFieldType); + BinaryFieldMapper queryBuilderUpdated = (BinaryFieldMapper) queryBuilderField.updateFieldType(fullNameToFieldType); + + if (updated == this && queryTermsUpdated == queryTermsField && unknownQueryUpdated == unknownQueryField + && queryBuilderUpdated == queryBuilderField) { + return this; + } + if (updated == this) { + updated = (PercolatorFieldMapper) updated.clone(); + } + updated.queryTermsField = queryTermsUpdated; + updated.unknownQueryField = unknownQueryUpdated; + updated.queryBuilderField = queryBuilderUpdated; + return updated; } @Override public Mapper parse(ParseContext context) throws IOException { QueryShardContext queryShardContext = new QueryShardContext(this.queryShardContext); - DocumentMapper documentMapper =
queryShardContext.getMapperService().documentMapper(context.sourceToParse().type()); - for (FieldMapper fieldMapper : documentMapper.mappers()) { - if (fieldMapper instanceof PercolatorFieldMapper) { - PercolatorFieldType fieldType = (PercolatorFieldType) fieldMapper.fieldType(); - if (context.doc().getField(fieldType.getQueryBuilderFieldName()) != null) { - // If a percolator query has been defined in an array object then multiple percolator queries - // could be provided. In order to prevent this we fail if we try to parse more than one query - // for the current document. - throw new IllegalArgumentException("a document can only contain one percolator query"); - } - } + if (context.doc().getField(queryBuilderField.name()) != null) { + // If a percolator query has been defined in an array object then multiple percolator queries + // could be provided. In order to prevent this we fail if we try to parse more than one query + // for the current document. + throw new IllegalArgumentException("a document can only contain one percolator query"); } XContentParser parser = context.parser(); @@ -196,7 +213,7 @@ public class PercolatorFieldMapper extends FieldMapper { // Fetching of terms, shapes and indexed scripts happen during this rewrite: queryBuilder = queryBuilder.rewrite(queryShardContext); - try (XContentBuilder builder = XContentFactory.contentBuilder(PercolatorQueryCache.QUERY_BUILDER_CONTENT_TYPE)) { + try (XContentBuilder builder = XContentFactory.contentBuilder(QUERY_BUILDER_CONTENT_TYPE)) { queryBuilder.toXContent(builder, new MapParams(Collections.emptyMap())); builder.flush(); byte[] queryBuilderAsBytes = builder.bytes().toBytes(); @@ -204,7 +221,8 @@ public class PercolatorFieldMapper extends FieldMapper { } Query query = toQuery(queryShardContext, mapUnmappedFieldAsString, queryBuilder); - ExtractQueryTermsService.extractQueryTerms(query, context.doc(), queryTermsField.name(), unknownQueryField.name(), queryTermsField.fieldType()); + ExtractQueryTermsService.extractQueryTerms(query, context.doc(), queryTermsField.name(), unknownQueryField.name(), + queryTermsField.fieldType()); return null; } diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java similarity index 91% rename from core/src/main/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhase.java rename to modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java index 9fb6934cca4..41a43d39328 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java @@ -17,7 +17,7 @@ * under the License. 
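The INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING constant, relocated here from the removed PercolatorQueryCache, remains an index-scoped opt-in that lets registered queries referencing unmapped fields be parsed as if those fields were strings. One plausible way to enable it at index creation time (client and index name are assumptions):

// Index-scoped percolator leniency toggle; registered with the settings
// infrastructure by PercolatorPlugin further below.
client.admin().indices().prepareCreate("queries")
        .setSettings(Settings.builder()
                .put("index.percolator.map_unmapped_fields_as_string", true))
        .addMapping("query", "query", "type=percolator")
        .get();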
*/ -package org.elasticsearch.index.percolator; +package org.elasticsearch.percolator; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.ReaderUtil; @@ -31,7 +31,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.query.ParsedQuery; -import org.elasticsearch.index.query.PercolateQuery; import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.highlight.HighlightPhase; @@ -40,12 +39,13 @@ import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SubSearchContext; +import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Map; // Highlighting in the case of the percolate query is a bit different, because the PercolateQuery itself doesn't get highlighted, -// but the source of the PercolateQuery gets highlighted by each hit with type '.percolator' (percolator queries). +// but the source of the PercolateQuery gets highlighted by each hit containing a query. public class PercolatorHighlightSubFetchPhase implements FetchSubPhase { private final HighlightPhase highlightPhase; @@ -69,8 +69,8 @@ public class PercolatorHighlightSubFetchPhase implements FetchSubPhase { } List ctxs = context.searcher().getIndexReader().leaves(); - PercolatorQueryCache queriesRegistry = context.percolatorQueryCache(); IndexSearcher percolatorIndexSearcher = percolateQuery.getPercolatorIndexSearcher(); + PercolateQuery.QueryStore queryStore = percolateQuery.getQueryStore(); LeafReaderContext percolatorLeafReaderContext = percolatorIndexSearcher.getIndexReader().leaves().get(0); FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext(); @@ -78,9 +78,14 @@ public class PercolatorHighlightSubFetchPhase implements FetchSubPhase { createSubSearchContext(context, percolatorLeafReaderContext, percolateQuery.getDocumentSource()); for (InternalSearchHit hit : hits) { - LeafReaderContext ctx = ctxs.get(ReaderUtil.subIndex(hit.docId(), ctxs)); - int segmentDocId = hit.docId() - ctx.docBase; - Query query = queriesRegistry.getQueries(ctx).getQuery(segmentDocId); + final Query query; + try { + LeafReaderContext ctx = ctxs.get(ReaderUtil.subIndex(hit.docId(), ctxs)); + int segmentDocId = hit.docId() - ctx.docBase; + query = queryStore.getQueries(ctx).getQuery(segmentDocId); + } catch (IOException e) { + throw new RuntimeException(e); + } if (query != null) { subSearchContext.parsedQuery(new ParsedQuery(query)); hitContext.reset( diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorPlugin.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorPlugin.java new file mode 100644 index 00000000000..963f14041b0 --- /dev/null +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorPlugin.java @@ -0,0 +1,80 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.percolator; + +import org.elasticsearch.action.ActionModule; +import org.elasticsearch.client.Client; +import org.elasticsearch.client.transport.TransportClient; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsModule; +import org.elasticsearch.indices.IndicesModule; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.SearchModule; + +public class PercolatorPlugin extends Plugin { + + public static final String NAME = "percolator"; + + private final boolean transportClientMode; + + public PercolatorPlugin(Settings settings) { + this.transportClientMode = transportClientMode(settings); + } + + @Override + public String name() { + return NAME; + } + + @Override + public String description() { + return "Percolator module adds capability to index queries and query these queries by specifying documents"; + } + + public void onModule(ActionModule module) { + module.registerAction(PercolateAction.INSTANCE, TransportPercolateAction.class); + module.registerAction(MultiPercolateAction.INSTANCE, TransportMultiPercolateAction.class); + } + + public void onModule(NetworkModule module) { + if (transportClientMode == false) { + module.registerRestHandler(RestPercolateAction.class); + module.registerRestHandler(RestMultiPercolateAction.class); + } + } + + public void onModule(IndicesModule module) { + module.registerMapper(PercolatorFieldMapper.CONTENT_TYPE, new PercolatorFieldMapper.TypeParser()); + } + + public void onModule(SearchModule module) { + module.registerQuery(PercolateQueryBuilder::new, PercolateQueryBuilder::fromXContent, PercolateQueryBuilder.QUERY_NAME_FIELD); + module.registerFetchSubPhase(PercolatorHighlightSubFetchPhase.class); + } + + public void onModule(SettingsModule module) { + module.registerSetting(PercolatorFieldMapper.INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING); + } + + static boolean transportClientMode(Settings settings) { + return TransportClient.CLIENT_TYPE.equals(settings.get(Client.CLIENT_TYPE_SETTING_S.getKey())); + } +} diff --git a/core/src/main/java/org/elasticsearch/rest/action/percolate/RestMultiPercolateAction.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/RestMultiPercolateAction.java similarity index 89% rename from core/src/main/java/org/elasticsearch/rest/action/percolate/RestMultiPercolateAction.java rename to modules/percolator/src/main/java/org/elasticsearch/percolator/RestMultiPercolateAction.java index 6a12ff1438b..a2902a9a7c2 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/percolate/RestMultiPercolateAction.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/RestMultiPercolateAction.java @@ -16,10 +16,8 @@ * specific language governing permissions and limitations * under the License. 
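Once PercolatorPlugin is installed, the onModule(SearchModule) hook above is what makes the percolate query available to the plain search API. A single-document percolation then looks roughly like this (index, type, and field names are assumptions):

// Percolate one document through the ordinary search API; matching
// registered queries come back as regular search hits.
SearchResponse response = client.prepareSearch("queries")
        .setQuery(new PercolateQueryBuilder("query", "doctype", new BytesArray("{\"field1\": \"value1\"}")))
        .get();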
*/ -package org.elasticsearch.rest.action.percolate; +package org.elasticsearch.percolator; -import org.elasticsearch.action.percolate.MultiPercolateRequest; -import org.elasticsearch.action.percolate.MultiPercolateResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; @@ -35,16 +33,16 @@ import org.elasticsearch.rest.action.support.RestToXContentListener; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; -/** - * - */ public class RestMultiPercolateAction extends BaseRestHandler { private final boolean allowExplicitIndex; + private final TransportMultiPercolateAction action; @Inject - public RestMultiPercolateAction(Settings settings, RestController controller, Client client) { + public RestMultiPercolateAction(Settings settings, RestController controller, Client client, + TransportMultiPercolateAction action) { super(settings, client); + this.action = action; controller.registerHandler(POST, "/_mpercolate", this); controller.registerHandler(POST, "/{index}/_mpercolate", this); controller.registerHandler(POST, "/{index}/{type}/_mpercolate", this); @@ -63,8 +61,7 @@ public class RestMultiPercolateAction extends BaseRestHandler { multiPercolateRequest.indices(Strings.splitStringByCommaToArray(restRequest.param("index"))); multiPercolateRequest.documentType(restRequest.param("type")); multiPercolateRequest.add(RestActions.getRestContent(restRequest), allowExplicitIndex); - - client.multiPercolate(multiPercolateRequest, new RestToXContentListener(restChannel)); + action.execute(multiPercolateRequest, new RestToXContentListener(restChannel)); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/percolate/RestPercolateAction.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/RestPercolateAction.java similarity index 93% rename from core/src/main/java/org/elasticsearch/rest/action/percolate/RestPercolateAction.java rename to modules/percolator/src/main/java/org/elasticsearch/percolator/RestPercolateAction.java index 91ecc9dbd6d..b752cc55f6c 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/percolate/RestPercolateAction.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/RestPercolateAction.java @@ -16,11 +16,9 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.rest.action.percolate; +package org.elasticsearch.percolator; import org.elasticsearch.action.get.GetRequest; -import org.elasticsearch.action.percolate.PercolateRequest; -import org.elasticsearch.action.percolate.PercolateResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; @@ -37,14 +35,14 @@ import org.elasticsearch.rest.action.support.RestToXContentListener; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; -/** - * - */ public class RestPercolateAction extends BaseRestHandler { + private final TransportPercolateAction action; + @Inject - public RestPercolateAction(Settings settings, RestController controller, Client client) { + public RestPercolateAction(Settings settings, RestController controller, Client client, TransportPercolateAction action) { super(settings, client); + this.action = action; controller.registerHandler(GET, "/{index}/{type}/_percolate", this); controller.registerHandler(POST, "/{index}/{type}/_percolate", this); @@ -69,7 +67,7 @@ public class RestPercolateAction extends BaseRestHandler { percolateRequest.source(RestActions.getRestContent(restRequest)); percolateRequest.indicesOptions(IndicesOptions.fromRequest(restRequest, percolateRequest.indicesOptions())); - executePercolate(percolateRequest, restChannel, client); + executePercolate(percolateRequest, restChannel); } void parseExistingDocPercolate(PercolateRequest percolateRequest, RestRequest restRequest, RestChannel restChannel, final Client client) { @@ -93,11 +91,11 @@ public class RestPercolateAction extends BaseRestHandler { percolateRequest.source(RestActions.getRestContent(restRequest)); percolateRequest.indicesOptions(IndicesOptions.fromRequest(restRequest, percolateRequest.indicesOptions())); - executePercolate(percolateRequest, restChannel, client); + executePercolate(percolateRequest, restChannel); } - void executePercolate(final PercolateRequest percolateRequest, final RestChannel restChannel, final Client client) { - client.percolate(percolateRequest, new RestToXContentListener(restChannel)); + void executePercolate(final PercolateRequest percolateRequest, final RestChannel restChannel) { + action.execute(percolateRequest, new RestToXContentListener<>(restChannel)); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/TransportMultiPercolateAction.java similarity index 99% rename from core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java rename to modules/percolator/src/main/java/org/elasticsearch/percolator/TransportMultiPercolateAction.java index bf0d79d884e..2b9f77ab82a 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/TransportMultiPercolateAction.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/TransportMultiPercolateAction.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.action.percolate; +package org.elasticsearch.percolator; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; diff --git a/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/TransportPercolateAction.java similarity index 99% rename from core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java rename to modules/percolator/src/main/java/org/elasticsearch/percolator/TransportPercolateAction.java index b80589df936..f3ee9230eaa 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/TransportPercolateAction.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.action.percolate; +package org.elasticsearch.percolator; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; @@ -42,7 +42,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.ConstantScoreQueryBuilder; -import org.elasticsearch.index.query.PercolateQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryParseContext; diff --git a/core/src/test/java/org/elasticsearch/index/percolator/ExtractQueryTermsServiceTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/ExtractQueryTermsServiceTests.java similarity index 95% rename from core/src/test/java/org/elasticsearch/index/percolator/ExtractQueryTermsServiceTests.java rename to modules/percolator/src/test/java/org/elasticsearch/percolator/ExtractQueryTermsServiceTests.java index 73f2da8c6d7..4245c99ba18 100644 --- a/core/src/test/java/org/elasticsearch/index/percolator/ExtractQueryTermsServiceTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/ExtractQueryTermsServiceTests.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
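The two transport actions above keep the deprecated APIs working by translating percolate requests into searches internally, which is consistent with TransportPercolateAction now importing BoolQueryBuilder, ConstantScoreQueryBuilder, and QueryBuilders. A hedged sketch of that kind of translation; documentSource and userQuery are placeholders, not names taken from this diff:

// A percolate request reduced to a search query: the percolate part,
// optionally combined with the request's own query as a filter clause.
QueryBuilder percolate = new PercolateQueryBuilder("query", "doctype", documentSource);
QueryBuilder combined = QueryBuilders.boolQuery().must(percolate).filter(userQuery);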
*/ -package org.elasticsearch.index.percolator; +package org.elasticsearch.percolator; import org.apache.lucene.analysis.core.WhitespaceAnalyzer; import org.apache.lucene.document.FieldType; @@ -41,6 +41,7 @@ import org.apache.lucene.search.spans.SpanNotQuery; import org.apache.lucene.search.spans.SpanOrQuery; import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.lucene.search.MatchNoDocsQuery; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.test.ESTestCase; @@ -296,6 +297,23 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { assertTermsEqual(terms, spanTermQuery1.getTerm()); } + public void testExtractQueryMetadata_matchNoDocsQuery() { + Set terms = ExtractQueryTermsService.extractQueryTerms(new MatchNoDocsQuery("sometimes there is no reason at all")); + assertEquals(0, terms.size()); + + BooleanQuery.Builder bq = new BooleanQuery.Builder(); + bq.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.MUST); + bq.add(new MatchNoDocsQuery("sometimes there is no reason at all"), BooleanClause.Occur.MUST); + terms = ExtractQueryTermsService.extractQueryTerms(bq.build()); + assertEquals(0, terms.size()); + + bq = new BooleanQuery.Builder(); + bq.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.SHOULD); + bq.add(new MatchNoDocsQuery("sometimes there is no reason at all"), BooleanClause.Occur.SHOULD); + terms = ExtractQueryTermsService.extractQueryTerms(bq.build()); + assertTermsEqual(terms, new Term("field", "value")); + } + public void testExtractQueryMetadata_unsupportedQuery() { TermRangeQuery termRangeQuery = new TermRangeQuery("_field", null, null, true, false); diff --git a/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java similarity index 83% rename from core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java rename to modules/percolator/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java index 6ec9ab24960..efa6ecee59a 100644 --- a/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java @@ -19,20 +19,20 @@ package org.elasticsearch.percolator; import org.apache.lucene.search.join.ScoreMode; -import org.elasticsearch.action.percolate.MultiPercolateRequestBuilder; -import org.elasticsearch.action.percolate.MultiPercolateResponse; -import org.elasticsearch.action.percolate.PercolateSourceBuilder; import org.elasticsearch.client.Requests; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; +import java.util.Collection; +import java.util.Collections; -import static org.elasticsearch.action.percolate.PercolateSourceBuilder.docBuilder; +import static org.elasticsearch.percolator.PercolateSourceBuilder.docBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.smileBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.yamlBuilder; @@ -42,7 +42,9 @@ import static 
org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; import static org.elasticsearch.percolator.PercolatorTestUtil.convertFromTextArray; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertMatchCount; +import static org.elasticsearch.percolator.PercolatorTestUtil.assertMatchCount; +import static org.elasticsearch.percolator.PercolatorTestUtil.preparePercolate; +import static org.elasticsearch.percolator.PercolatorTestUtil.prepareMultiPercolate; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.arrayWithSize; @@ -58,6 +60,16 @@ public class MultiPercolatorIT extends ESIntegTestCase { private final static String INDEX_NAME = "queries"; private final static String TYPE_NAME = "query"; + @Override + protected Collection> nodePlugins() { + return Collections.singleton(PercolatorPlugin.class); + } + + @Override + protected Collection> transportClientPlugins() { + return Collections.singleton(PercolatorPlugin.class); + } + public void testBasics() throws Exception { assertAcked(prepareCreate(INDEX_NAME) .addMapping(TYPE_NAME, "query", "type=percolator") @@ -82,20 +94,20 @@ public class MultiPercolatorIT extends ESIntegTestCase { .execute().actionGet(); refresh(); - MultiPercolateResponse response = client().prepareMultiPercolate() - .add(client().preparePercolate() + MultiPercolateResponse response = prepareMultiPercolate(client()) + .add(preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "b").endObject()))) - .add(client().preparePercolate() + .add(preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(yamlBuilder().startObject().field("field1", "c").endObject()))) - .add(client().preparePercolate() + .add(preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(smileBuilder().startObject().field("field1", "b c").endObject()))) - .add(client().preparePercolate() + .add(preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "d").endObject()))) - .add(client().preparePercolate() // non existing doc, so error element + .add(preparePercolate(client()) // non existing doc, so error element .setIndices(INDEX_NAME).setDocumentType("type") .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("5"))) .execute().actionGet(); @@ -158,24 +170,24 @@ public class MultiPercolatorIT extends ESIntegTestCase { .execute().actionGet(); refresh(); - MultiPercolateResponse response = client().prepareMultiPercolate() - .add(client().preparePercolate() + MultiPercolateResponse response = prepareMultiPercolate(client()) + .add(preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setRouting("a") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "b").endObject()))) - .add(client().preparePercolate() + .add(preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setRouting("a") .setPercolateDoc(docBuilder().setDoc(yamlBuilder().startObject().field("field1", "c").endObject()))) - .add(client().preparePercolate() + .add(preparePercolate(client()) 
.setIndices(INDEX_NAME).setDocumentType("type") .setRouting("a") .setPercolateDoc(docBuilder().setDoc(smileBuilder().startObject().field("field1", "b c").endObject()))) - .add(client().preparePercolate() + .add(preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setRouting("a") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "d").endObject()))) - .add(client().preparePercolate() // non existing doc, so error element + .add(preparePercolate(client()) // non existing doc, so error element .setIndices(INDEX_NAME).setDocumentType("type") .setRouting("a") .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("5"))) @@ -227,11 +239,11 @@ public class MultiPercolatorIT extends ESIntegTestCase { .execute().actionGet(); refresh(); - MultiPercolateRequestBuilder builder = client().prepareMultiPercolate(); + MultiPercolateRequestBuilder builder = prepareMultiPercolate(client()); int numPercolateRequest = randomIntBetween(50, 100); for (int i = 0; i < numPercolateRequest; i++) { builder.add( - client().preparePercolate() + preparePercolate(client()) .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("1")) .setIndices(INDEX_NAME).setDocumentType("type") .setSize(numQueries) @@ -247,10 +259,10 @@ public class MultiPercolatorIT extends ESIntegTestCase { } // Non existing doc - builder = client().prepareMultiPercolate(); + builder = prepareMultiPercolate(client()); for (int i = 0; i < numPercolateRequest; i++) { builder.add( - client().preparePercolate() + preparePercolate(client()) .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("2")) .setIndices(INDEX_NAME).setDocumentType("type").setSize(numQueries) @@ -266,16 +278,16 @@ public class MultiPercolatorIT extends ESIntegTestCase { } // One existing doc - builder = client().prepareMultiPercolate(); + builder = prepareMultiPercolate(client()); for (int i = 0; i < numPercolateRequest; i++) { builder.add( - client().preparePercolate() + preparePercolate(client()) .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("2")) .setIndices(INDEX_NAME).setDocumentType("type").setSize(numQueries) ); } builder.add( - client().preparePercolate() + preparePercolate(client()) .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("1")) .setIndices(INDEX_NAME).setDocumentType("type").setSize(numQueries) ); @@ -300,11 +312,11 @@ public class MultiPercolatorIT extends ESIntegTestCase { } refresh(); - MultiPercolateRequestBuilder builder = client().prepareMultiPercolate(); + MultiPercolateRequestBuilder builder = prepareMultiPercolate(client()); int numPercolateRequest = randomIntBetween(50, 100); for (int i = 0; i < numPercolateRequest; i++) { builder.add( - client().preparePercolate() + preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setSize(numQueries) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field", "a").endObject()))); @@ -319,10 +331,10 @@ public class MultiPercolatorIT extends ESIntegTestCase { } // All illegal json - builder = client().prepareMultiPercolate(); + builder = prepareMultiPercolate(client()); for (int i = 0; i < numPercolateRequest; i++) { builder.add( - client().preparePercolate() + preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setSource("illegal json")); } @@ -335,15 +347,15 @@ public class MultiPercolatorIT extends ESIntegTestCase { } // one valid request - builder = client().prepareMultiPercolate(); + builder = prepareMultiPercolate(client()); for (int i = 
0; i < numPercolateRequest; i++) { builder.add( - client().preparePercolate() + preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setSource("illegal json")); } builder.add( - client().preparePercolate() + preparePercolate(client()) .setSize(numQueries) .setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field", "a").endObject()))); @@ -357,43 +369,15 @@ public class MultiPercolatorIT extends ESIntegTestCase { public void testNestedMultiPercolation() throws IOException { initNestedIndexAndPercolation(); - MultiPercolateRequestBuilder mpercolate= client().prepareMultiPercolate(); - mpercolate.add(client().preparePercolate().setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(getNotMatchingNestedDoc())).setIndices(INDEX_NAME).setDocumentType("company")); - mpercolate.add(client().preparePercolate().setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(getMatchingNestedDoc())).setIndices(INDEX_NAME).setDocumentType("company")); + MultiPercolateRequestBuilder mpercolate= prepareMultiPercolate(client()); + mpercolate.add(preparePercolate(client()).setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(getNotMatchingNestedDoc())).setIndices(INDEX_NAME).setDocumentType("company")); + mpercolate.add(preparePercolate(client()).setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(getMatchingNestedDoc())).setIndices(INDEX_NAME).setDocumentType("company")); MultiPercolateResponse response = mpercolate.get(); assertEquals(response.getItems()[0].getResponse().getMatches().length, 0); assertEquals(response.getItems()[1].getResponse().getMatches().length, 1); assertEquals(response.getItems()[1].getResponse().getMatches()[0].getId().string(), "Q"); } - public void testStartTimeIsPropagatedToShardRequests() throws Exception { - // See: https://github.com/elastic/elasticsearch/issues/15908 - internalCluster().ensureAtLeastNumDataNodes(2); - client().admin().indices().prepareCreate(INDEX_NAME) - .setSettings(Settings.builder() - .put("index.number_of_shards", 1) - .put("index.number_of_replicas", 1) - ) - .addMapping("type", "date_field", "type=date,format=strict_date_optional_time||epoch_millis") - .addMapping(TYPE_NAME, "query", "type=percolator") - .get(); - ensureGreen(); - - client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") - .setSource(jsonBuilder().startObject().field("query", rangeQuery("date_field").lt("now+90d")).endObject()) - .setRefresh(true) - .get(); - - for (int i = 0; i < 32; i++) { - MultiPercolateResponse response = client().prepareMultiPercolate() - .add(client().preparePercolate().setDocumentType("type").setIndices(INDEX_NAME) - .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc("date_field", "2015-07-21T10:28:01-07:00"))) - .get(); - assertThat(response.getItems()[0].getResponse().getCount(), equalTo(1L)); - assertThat(response.getItems()[0].getResponse().getMatches()[0].getId().string(), equalTo("1")); - } - } - void initNestedIndexAndPercolation() throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder(); mapping.startObject().startObject("properties").startObject("companyname").field("type", "text").endObject() diff --git a/core/src/test/java/org/elasticsearch/action/percolate/MultiPercolatorRequestTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/MultiPercolatorRequestTests.java similarity index 98% rename from core/src/test/java/org/elasticsearch/action/percolate/MultiPercolatorRequestTests.java 
rename to modules/percolator/src/test/java/org/elasticsearch/percolator/MultiPercolatorRequestTests.java index 16251463d57..6f64dc63189 100644 --- a/core/src/test/java/org/elasticsearch/action/percolate/MultiPercolatorRequestTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/MultiPercolatorRequestTests.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.action.percolate; +package org.elasticsearch.percolator; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.collect.MapBuilder; @@ -30,11 +30,9 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -/** - */ public class MultiPercolatorRequestTests extends ESTestCase { public void testParseBulkRequests() throws Exception { - byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/percolate/mpercolate1.json"); + byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/percolator/mpercolate1.json"); MultiPercolateRequest request = new MultiPercolateRequest().add(data, 0, data.length); assertThat(request.requests().size(), equalTo(8)); @@ -150,7 +148,7 @@ public class MultiPercolatorRequestTests extends ESTestCase { } public void testParseBulkRequestsDefaults() throws Exception { - byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/percolate/mpercolate2.json"); + byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/percolator/mpercolate2.json"); MultiPercolateRequest request = new MultiPercolateRequest(); request.indices("my-index1").documentType("my-type1").indicesOptions(IndicesOptions.lenientExpandOpen()); request.add(data, 0, data.length); diff --git a/core/src/test/java/org/elasticsearch/index/query/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java similarity index 88% rename from core/src/test/java/org/elasticsearch/index/query/PercolateQueryBuilderTests.java rename to modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java index 690b2c03a2f..c090ebf6dbf 100644 --- a/core/src/test/java/org/elasticsearch/index/query/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java @@ -17,10 +17,9 @@ * under the License. 
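testParseBulkRequests above feeds MultiPercolateRequest.add() the same newline-delimited format the _mpercolate endpoint accepts: a header line naming the target, then a source line per item. A tiny hand-built body for illustration; the header fields are assumptions, and the real fixtures live in mpercolate1.json and mpercolate2.json, which are not reproduced here:

// One mpercolate item: action/metadata line, then the document to percolate.
byte[] data = ("{\"percolate\": {\"index\": \"my-index1\", \"type\": \"my-type1\"}}\n"
        + "{\"doc\": {\"field1\": \"value1\"}}\n").getBytes(StandardCharsets.UTF_8);
MultiPercolateRequest request = new MultiPercolateRequest().add(data, 0, data.length);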
*/ -package org.elasticsearch.index.query; +package org.elasticsearch.percolator; import com.fasterxml.jackson.core.JsonParseException; - import org.apache.lucene.search.Query; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ResourceNotFoundException; @@ -35,13 +34,19 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.AbstractQueryTestCase; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.script.Script; import org.hamcrest.Matchers; import org.junit.BeforeClass; import java.io.IOException; +import java.util.Collection; import java.util.Collections; + import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -62,11 +67,15 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQueryBuilder> { + @Override + protected Collection<Class<? extends Plugin>> getPlugins() { + return Collections.singleton(PercolatorPlugin.class); + } + + @Override + protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { queryField = randomAsciiOfLength(4); docType = randomAsciiOfLength(4); - MapperService mapperService = createShardContext().getMapperService(); mapperService.merge("query_type", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("query_type", queryField, "type=percolator" ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); @@ -182,38 +191,38 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQueryBuilder> { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - QueryBuilders.percolateQuery(null, null, new BytesArray("{}")); + new PercolateQueryBuilder(null, null, new BytesArray("{}")); }); assertThat(e.getMessage(), equalTo("[field] is a required argument")); - e = expectThrows(IllegalArgumentException.class, () -> QueryBuilders.percolateQuery("_field", null, new BytesArray("{}"))); + e = expectThrows(IllegalArgumentException.class, () -> new PercolateQueryBuilder("_field", null, new BytesArray("{}"))); assertThat(e.getMessage(), equalTo("[document_type] is a required argument")); - e = expectThrows(IllegalArgumentException.class, () -> QueryBuilders.percolateQuery("_field", "_document_type", null)); + e = expectThrows(IllegalArgumentException.class, () -> new PercolateQueryBuilder("_field", "_document_type", null)); assertThat(e.getMessage(), equalTo("[document] is a required argument")); e = expectThrows(IllegalArgumentException.class, () -> { - QueryBuilders.percolateQuery(null, null, "_index", "_type", "_id", null, null, null); + new PercolateQueryBuilder(null, null, "_index", "_type", "_id", null, null, null); }); assertThat(e.getMessage(), equalTo("[field] is a required argument")); e = expectThrows(IllegalArgumentException.class, () -> { - QueryBuilders.percolateQuery("_field", null, "_index", "_type", "_id", null, null, null); + new PercolateQueryBuilder("_field", null, "_index", "_type", "_id", null, null, null); }); assertThat(e.getMessage(), equalTo("[document_type] is a required argument")); e = expectThrows(IllegalArgumentException.class, () -> { - QueryBuilders.percolateQuery("_field", "_document_type", null, "_type", "_id", null, null, null); + new PercolateQueryBuilder("_field", "_document_type", null, "_type", "_id", null, null, null); }); assertThat(e.getMessage(),
equalTo("[index] is a required argument")); e = expectThrows(IllegalArgumentException.class, () -> { - QueryBuilders.percolateQuery("_field", "_document_type", "_index", null, "_id", null, null, null); + new PercolateQueryBuilder("_field", "_document_type", "_index", null, "_id", null, null, null); }); assertThat(e.getMessage(), equalTo("[type] is a required argument")); e = expectThrows(IllegalArgumentException.class, () -> { - QueryBuilders.percolateQuery("_field", "_document_type", "_index", "_type", null, null, null, null); + new PercolateQueryBuilder("_field", "_document_type", "_index", "_type", null, null, null, null); }); assertThat(e.getMessage(), equalTo("[id] is a required argument")); } diff --git a/core/src/test/java/org/elasticsearch/index/query/PercolateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java similarity index 97% rename from core/src/test/java/org/elasticsearch/index/query/PercolateQueryTests.java rename to modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java index acb4917b421..07959db1ff1 100644 --- a/core/src/test/java/org/elasticsearch/index/query/PercolateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.index.query; +package org.elasticsearch.percolator; import org.apache.lucene.analysis.core.WhitespaceAnalyzer; import org.apache.lucene.document.Field; @@ -52,11 +52,10 @@ import org.apache.lucene.search.spans.SpanOrQuery; import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.store.Directory; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.UidFieldMapper; -import org.elasticsearch.index.percolator.ExtractQueryTermsService; -import org.elasticsearch.index.percolator.PercolatorFieldMapper; import org.elasticsearch.test.ESTestCase; import org.junit.After; import org.junit.Before; @@ -84,14 +83,14 @@ public class PercolateQueryTests extends ESTestCase { private Directory directory; private IndexWriter indexWriter; private Map queries; - private PercolateQuery.QueryRegistry queryRegistry; + private PercolateQuery.QueryStore queryStore; private DirectoryReader directoryReader; @Before public void init() throws Exception { directory = newDirectory(); queries = new HashMap<>(); - queryRegistry = ctx -> docId -> { + queryStore = ctx -> docId -> { try { String val = ctx.reader().document(docId).get(UidFieldMapper.NAME); return queries.get(Uid.createUid(val).id()); @@ -145,7 +144,7 @@ public class PercolateQueryTests extends ESTestCase { PercolateQuery.Builder builder = new PercolateQuery.Builder( "docType", - queryRegistry, + queryStore, new BytesArray("{}"), percolateSearcher ); @@ -219,7 +218,7 @@ public class PercolateQueryTests extends ESTestCase { PercolateQuery.Builder builder = new PercolateQuery.Builder( "docType", - queryRegistry, + queryStore, new BytesArray("{}"), percolateSearcher ); @@ -324,7 +323,7 @@ public class PercolateQueryTests extends ESTestCase { ParseContext.Document document = new ParseContext.Document(); ExtractQueryTermsService.extractQueryTerms(query, document, EXTRACTED_TERMS_FIELD_NAME, UNKNOWN_QUERY_FIELD_NAME, EXTRACTED_TERMS_FIELD_TYPE); - document.add(new StoredField(UidFieldMapper.NAME, 
Uid.createUid(PercolatorFieldMapper.LEGACY_TYPE_NAME, id))); + document.add(new StoredField(UidFieldMapper.NAME, Uid.createUid(MapperService.PERCOLATOR_LEGACY_TYPE_NAME, id))); assert extraFields.length % 2 == 0; for (int i = 0; i < extraFields.length; i++) { document.add(new StringField(extraFields[i], extraFields[++i], Field.Store.NO)); @@ -336,7 +335,7 @@ public class PercolateQueryTests extends ESTestCase { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); PercolateQuery.Builder builder1 = new PercolateQuery.Builder( "docType", - queryRegistry, + queryStore, new BytesArray("{}"), percolateSearcher ); @@ -346,7 +345,7 @@ public class PercolateQueryTests extends ESTestCase { PercolateQuery.Builder builder2 = new PercolateQuery.Builder( "docType", - queryRegistry, + queryStore, new BytesArray("{}"), percolateSearcher ); diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java similarity index 92% rename from core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java rename to modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java index 167110f0333..c8f79d3b977 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java @@ -18,11 +18,9 @@ */ package org.elasticsearch.percolator; -import org.elasticsearch.action.percolate.PercolateRequestBuilder; -import org.elasticsearch.action.percolate.PercolateResponse; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.percolator.PercolatorFieldMapper; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.Aggregations; @@ -34,14 +32,17 @@ import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBuck import org.elasticsearch.test.ESIntegTestCase; import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.List; -import static org.elasticsearch.action.percolate.PercolateSourceBuilder.docBuilder; +import static org.elasticsearch.percolator.PercolateSourceBuilder.docBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertMatchCount; +import static org.elasticsearch.percolator.PercolatorTestUtil.assertMatchCount; +import static org.elasticsearch.percolator.PercolatorTestUtil.preparePercolate; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.equalTo; @@ -55,6 +56,16 @@ public class PercolatorAggregationsIT extends ESIntegTestCase { private final static String INDEX_NAME = "queries"; private final static String TYPE_NAME = "query"; + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return Collections.singleton(PercolatorPlugin.class); + } + + @Override + protected Collection<Class<? extends Plugin>>
transportClientPlugins() { + return Collections.singleton(PercolatorPlugin.class); + } + // Just test the integration with facets and aggregations, not the facet and aggregation functionality! public void testAggregations() throws Exception { assertAcked(prepareCreate(INDEX_NAME) @@ -83,7 +94,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase { for (int i = 0; i < numQueries; i++) { String value = values[i % numUniqueQueries]; - PercolateRequestBuilder percolateRequestBuilder = client().preparePercolate() + PercolateRequestBuilder percolateRequestBuilder = preparePercolate(client()) .setIndices(INDEX_NAME) .setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", value).endObject())) @@ -150,7 +161,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase { for (int i = 0; i < numQueries; i++) { String value = values[i % numUniqueQueries]; - PercolateRequestBuilder percolateRequestBuilder = client().preparePercolate() + PercolateRequestBuilder percolateRequestBuilder = preparePercolate(client()) .setIndices(INDEX_NAME) .setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", value).endObject())) @@ -204,7 +215,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase { .addMapping(TYPE_NAME, "query", "type=percolator") .execute().actionGet(); ensureGreen(); - PercolateRequestBuilder percolateRequestBuilder = client().preparePercolate().setIndices(INDEX_NAME).setDocumentType("type") + PercolateRequestBuilder percolateRequestBuilder = preparePercolate(client()).setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "value").endObject())) .addAggregation(AggregationBuilders.significantTerms("a").field("field2")); PercolateResponse response = percolateRequestBuilder.get(); @@ -232,7 +243,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase { for (int i = 0; i < numQueries; i++) { String value = "value0"; - PercolateRequestBuilder percolateRequestBuilder = client().preparePercolate() + PercolateRequestBuilder percolateRequestBuilder = preparePercolate(client()) .setIndices(INDEX_NAME) .setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", value).endObject())) diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityTests.java similarity index 86% rename from core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java rename to modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityTests.java index fbad26a5b6b..1071ffe759b 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityTests.java @@ -21,34 +21,47 @@ package org.elasticsearch.percolator; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; import org.elasticsearch.Version; -import org.elasticsearch.action.percolate.PercolateResponse; -import org.elasticsearch.action.percolate.PercolateSourceBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MappingMetaData; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.env.Environment; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; +import java.util.Collection; +import java.util.Collections; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.percolator.PercolatorTestUtil.preparePercolate; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0) @LuceneTestCase.SuppressFileSystems("ExtrasFS") -public class PercolatorBackwardsCompatibilityIT extends ESIntegTestCase { +// Can't run as an IT because the test cluster is immutable and this test adds nodes during the test +public class PercolatorBackwardsCompatibilityTests extends ESIntegTestCase { private final static String INDEX_NAME = "percolator_index"; + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return Collections.singleton(PercolatorPlugin.class); + } + + @Override + protected Collection<Class<? extends Plugin>> transportClientPlugins() { + return Collections.singleton(PercolatorPlugin.class); + } + public void testOldPercolatorIndex() throws Exception { setupNode(); @@ -76,7 +89,7 @@ public class PercolatorBackwardsCompatibilityIT extends ESIntegTestCase { assertThat(searchResponse.getHits().getAt(2).id(), equalTo("3")); // verify percolate response - PercolateResponse percolateResponse = client().preparePercolate() + PercolateResponse percolateResponse = preparePercolate(client()) .setIndices(INDEX_NAME) .setDocumentType("message") .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc("message", "the quick brown fox jumps over the lazy dog")) @@ -93,7 +106,7 @@ public class PercolatorBackwardsCompatibilityIT extends ESIntegTestCase { .get(); refresh(); - percolateResponse = client().preparePercolate() + percolateResponse = preparePercolate(client()) .setIndices(INDEX_NAME) .setDocumentType("message") .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc("message", "the quick brown fox jumps over the lazy dog")) @@ -109,7 +122,8 @@ public class PercolatorBackwardsCompatibilityIT extends ESIntegTestCase { private void setupNode() throws Exception { Path dataDir = createTempDir(); Path clusterDir = Files.createDirectory(dataDir.resolve(cluster().getClusterName())); - try (InputStream stream = PercolatorBackwardsCompatibilityIT.class.getResourceAsStream("/indices/percolator/bwc_index_2.0.0.zip")) { + try (InputStream stream = PercolatorBackwardsCompatibilityTests.class.
+ getResourceAsStream("/indices/percolator/bwc_index_2.0.0.zip")) { TestUtil.unzip(stream, clusterDir); } diff --git a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java similarity index 90% rename from core/src/test/java/org/elasticsearch/index/percolator/PercolatorFieldMapperTests.java rename to modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index 604670f179e..bcb9ed8d40d 100644 --- a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.index.percolator; +package org.elasticsearch.percolator; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.compress.CompressedXContent; @@ -32,13 +32,15 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.indices.TermsLookup; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Before; import java.io.IOException; +import java.util.Collection; +import java.util.Collections; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.percolator.PercolatorQueryCache.QUERY_BUILDER_CONTENT_TYPE; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; @@ -59,6 +61,11 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { private MapperService mapperService; private PercolatorFieldMapper.PercolatorFieldType fieldType; + @Override + protected Collection<Class<? extends Plugin>> getPlugins() { + return Collections.singleton(PercolatorPlugin.class); + } + @Before public void init() throws Exception { indexService = createIndex("test", Settings.EMPTY); @@ -120,7 +127,8 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { // (it can't use shard data for rewriting purposes, because percolator queries run on MemoryIndex) for (QueryBuilder query : queries) { - ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", XContentFactory.jsonBuilder().startObject() + ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", + XContentFactory.jsonBuilder().startObject() .field(fieldName, query) .endObject().bytes()); BytesRef qbSource = doc.rootDoc().getFields(fieldType.getQueryBuilderFieldName())[0].binaryValue(); @@ -192,11 +200,21 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { .startObject("query_field2").field("type", "percolator").endObject() .endObject() .endObject().endObject().string(); - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { - mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true); - }); - assertThat(exception.getMessage(), equalTo("Up to one percolator field type is allowed per index, " + - "found the following percolator fields [[query_field1, query_field2]]")); + mapperService.merge(typeName, new CompressedXContent(percolatorMapper),
MapperService.MergeReason.MAPPING_UPDATE, true); + + QueryBuilder queryBuilder = matchQuery("field", "value"); + ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", + jsonBuilder().startObject() + .field("query_field1", queryBuilder) + .field("query_field2", queryBuilder) + .endObject().bytes() + ); + assertThat(doc.rootDoc().getFields().size(), equalTo(22)); // also includes all other meta fields + BytesRef queryBuilderAsBytes = doc.rootDoc().getField("query_field1.query_builder_field").binaryValue(); + assertQueryBuilder(queryBuilderAsBytes, queryBuilder); + + queryBuilderAsBytes = doc.rootDoc().getField("query_field2.query_builder_field").binaryValue(); + assertQueryBuilder(queryBuilderAsBytes, queryBuilder); } // percolator field can be nested under an object field, but only one query can be specified per document @@ -251,7 +269,8 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { } private void assertQueryBuilder(BytesRef actual, QueryBuilder expected) throws IOException { - XContentParser sourceParser = QUERY_BUILDER_CONTENT_TYPE.xContent().createParser(actual.bytes, actual.offset, actual.length); + XContentParser sourceParser = PercolatorFieldMapper.QUERY_BUILDER_CONTENT_TYPE.xContent() + .createParser(actual.bytes, actual.offset, actual.length); QueryParseContext qsc = indexService.newQueryShardContext().newParseContext(sourceParser); assertThat(qsc.parseInnerQueryBuilder(), equalTo(expected)); } diff --git a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhaseTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java similarity index 97% rename from core/src/test/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhaseTests.java rename to modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java index fb130d5dde0..3f7aaafc105 100644 --- a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorHighlightSubFetchPhaseTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.index.percolator; +package org.elasticsearch.percolator; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; @@ -25,7 +25,6 @@ import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.index.query.PercolateQuery; import org.elasticsearch.search.highlight.SearchContextHighlight; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESTestCase; diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorIT.java similarity index 92% rename from core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java rename to modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorIT.java index 77b67227401..e4b318f5d39 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorIT.java @@ -24,9 +24,6 @@ import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.percolate.PercolateResponse; -import org.elasticsearch.action.percolate.PercolateSourceBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Requests; @@ -43,13 +40,15 @@ import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.functionscore.WeightBuilder; -import org.elasticsearch.index.query.InnerHitBuilder; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; @@ -60,7 +59,7 @@ import java.util.NavigableSet; import java.util.Set; import java.util.TreeSet; -import static org.elasticsearch.action.percolate.PercolateSourceBuilder.docBuilder; +import static org.elasticsearch.percolator.PercolateSourceBuilder.docBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.smileBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.yamlBuilder; @@ -71,20 +70,19 @@ import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery; import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; -import static org.elasticsearch.index.query.QueryBuilders.nestedQuery; import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static 
org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.fieldValueFactorFunction; import static org.elasticsearch.percolator.PercolatorTestUtil.convertFromTextArray; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertMatchCount; +import static org.elasticsearch.percolator.PercolatorTestUtil.assertMatchCount; +import static org.elasticsearch.percolator.PercolatorTestUtil.preparePercolate; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.arrayWithSize; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -98,6 +96,16 @@ public class PercolatorIT extends ESIntegTestCase { private final static String INDEX_NAME = "queries"; private final static String TYPE_NAME = "query"; + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return Collections.singleton(PercolatorPlugin.class); + } + + @Override + protected Collection<Class<? extends Plugin>> transportClientPlugins() { + return Collections.singleton(PercolatorPlugin.class); + } + public void testSimple1() throws Exception { client().admin().indices().prepareCreate(INDEX_NAME).addMapping(TYPE_NAME, "query", "type=percolator").get(); ensureGreen(); @@ -124,7 +132,7 @@ public class PercolatorIT extends ESIntegTestCase { refresh(); logger.info("--> Percolate doc with field1=b"); - PercolateResponse response = client().preparePercolate() + PercolateResponse response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType(TYPE_NAME) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "b").endObject())) .execute().actionGet(); @@ -133,7 +141,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "4")); logger.info("--> Percolate doc with field1=c"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType(TYPE_NAME) .setPercolateDoc(docBuilder().setDoc(yamlBuilder().startObject().field("field1", "c").endObject())) .execute().actionGet(); @@ -142,7 +150,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("2", "4")); logger.info("--> Percolate doc with field1=b c"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType(TYPE_NAME) .setPercolateDoc(docBuilder().setDoc(smileBuilder().startObject().field("field1", "b c").endObject())) .execute().actionGet(); @@ -151,7 +159,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "2", "3", "4")); logger.info("--> Percolate doc with field1=d"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType(TYPE_NAME) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "d").endObject())) .execute().actionGet();
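Note: the mechanical rewrite of client().preparePercolate() to preparePercolate(client()) that runs through all of these hunks exists because the percolate actions no longer hang off the core Client interface once the percolator became a module; each test now pulls the builders in through the static PercolatorTestUtil imports shown above. This diff does not include PercolatorTestUtil itself, but from the call sites and static imports its shape is presumably a thin set of static factories along these lines (a sketch only; the action/builder wiring follows the usual Elasticsearch action pattern and is inferred, not confirmed by this diff):

    package org.elasticsearch.percolator;

    import org.elasticsearch.client.ElasticsearchClient;

    import static org.junit.Assert.fail;

    /** Sketch of the helpers implied by the call sites; stands in for the removed Client#preparePercolate(). */
    public class PercolatorTestUtil {

        public static PercolateRequestBuilder preparePercolate(ElasticsearchClient client) {
            // Build the request against the module's action instance instead of a method on Client.
            return new PercolateRequestBuilder(client, PercolateAction.INSTANCE);
        }

        public static MultiPercolateRequestBuilder prepareMultiPercolate(ElasticsearchClient client) {
            return new MultiPercolateRequestBuilder(client, MultiPercolateAction.INSTANCE);
        }

        public static void assertMatchCount(PercolateResponse response, long expectedCount) {
            if (response.getCount() != expectedCount) {
                fail("Expected " + expectedCount + " matches but got " + response.getCount());
            }
        }
    }

This is also why every migrated test registers PercolatorPlugin through nodePlugins()/transportClientPlugins(): the percolate transport actions are only available on nodes and transport clients that have the module's plugin installed.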
@@ -161,7 +169,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> Percolate non existing doc"); try { - client().preparePercolate() + preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType(TYPE_NAME) .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("5")) .execute().actionGet(); @@ -184,7 +192,7 @@ public class PercolatorIT extends ESIntegTestCase { .field("field2", "value") .endObject().endObject(); - PercolateResponse response = client().preparePercolate().setSource(doc) + PercolateResponse response = preparePercolate(client()).setSource(doc) .setIndices(INDEX_NAME).setDocumentType(TYPE_NAME) .execute().actionGet(); assertMatchCount(response, 0L); @@ -196,7 +204,7 @@ public class PercolatorIT extends ESIntegTestCase { .execute().actionGet(); refresh(); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType(TYPE_NAME) .setSource(doc).execute().actionGet(); assertMatchCount(response, 1L); @@ -209,7 +217,7 @@ public class PercolatorIT extends ESIntegTestCase { .execute().actionGet(); refresh(); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type1") .setSource(doc) .execute().actionGet(); @@ -220,7 +228,7 @@ public class PercolatorIT extends ESIntegTestCase { client().prepareDelete(INDEX_NAME, TYPE_NAME, "test2").execute().actionGet(); refresh(); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type1") .setSource(doc).execute().actionGet(); assertMatchCount(response, 1L); @@ -246,7 +254,7 @@ public class PercolatorIT extends ESIntegTestCase { refresh(); logger.info("--> Percolate doc with no routing"); - PercolateResponse response = client().preparePercolate() + PercolateResponse response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject())) .setSize(100) @@ -255,7 +263,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(response.getMatches(), arrayWithSize(100)); logger.info("--> Percolate doc with routing=0"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject())) .setSize(100) @@ -265,7 +273,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(response.getMatches(), arrayWithSize(50)); logger.info("--> Percolate doc with routing=1"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject())) .setSize(100) @@ -334,7 +342,7 @@ public class PercolatorIT extends ESIntegTestCase { .execute().actionGet(); refresh(); - PercolateResponse percolate = client().preparePercolate() + PercolateResponse percolate = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("doc") .setSource(jsonBuilder().startObject() .startObject("doc").field("filingcategory", "s").endObject() @@ -368,7 +376,7 @@ public class PercolatorIT extends ESIntegTestCase { for (int i = 0; i < 10; i++) { - PercolateResponse percolate = client().preparePercolate() + 
PercolateResponse percolate = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type1") .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value1").endObject().endObject()) .execute().actionGet(); @@ -377,7 +385,7 @@ public class PercolatorIT extends ESIntegTestCase { } for (int i = 0; i < 10; i++) { - PercolateResponse percolate = client().preparePercolate() + PercolateResponse percolate = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type1") .setPreference("_local") .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value1").endObject().endObject()) @@ -421,7 +429,7 @@ public class PercolatorIT extends ESIntegTestCase { .setRefresh(true) .execute().actionGet(); - PercolateResponse percolate = client().preparePercolate() + PercolateResponse percolate = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type1") .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value1").endObject().endObject()) .execute().actionGet(); @@ -429,7 +437,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(percolate.getMatches(), arrayWithSize(1)); assertThat(convertFromTextArray(percolate.getMatches(), INDEX_NAME), arrayContaining("kuku")); - percolate = client().preparePercolate() + percolate = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type1") .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value2").endObject().endObject()) .execute().actionGet(); @@ -456,7 +464,7 @@ public class PercolatorIT extends ESIntegTestCase { .setRefresh(true) .execute().actionGet(); - PercolateResponse percolate = client().preparePercolate() + PercolateResponse percolate = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type1") .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value1").endObject().endObject()) .execute().actionGet(); @@ -473,7 +481,7 @@ public class PercolatorIT extends ESIntegTestCase { .setRefresh(true) .execute().actionGet(); - percolate = client().preparePercolate() + percolate = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type1") .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value2").endObject().endObject()) .execute().actionGet(); @@ -493,7 +501,7 @@ public class PercolatorIT extends ESIntegTestCase { PercolateSourceBuilder sourceBuilder = new PercolateSourceBuilder() .setDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "value2").endObject())) .setQueryBuilder(termQuery("color", "red")); - percolate = client().preparePercolate() + percolate = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type1") .setSource(sourceBuilder) .execute().actionGet(); @@ -504,7 +512,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> deleting query 1"); client().prepareDelete(INDEX_NAME, TYPE_NAME, "kuku").setRefresh(true).execute().actionGet(); - percolate = client().preparePercolate() + percolate = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type1") .setSource(jsonBuilder().startObject().startObject("doc").startObject("type1") .field("field1", "value1") @@ -514,38 +522,6 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(percolate.getMatches(), emptyArray()); } - public void testPercolateStatistics() throws Exception { - client().admin().indices().prepareCreate(INDEX_NAME) - .addMapping(TYPE_NAME, "query", "type=percolator") - .get(); - 
client().admin().indices().prepareCreate("test2") - .addMapping(TYPE_NAME, "query", "type=percolator") - .get(); - ensureGreen(); - - logger.info("--> register a query"); - client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") - .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) - .execute().actionGet(); - client().prepareIndex("test2", TYPE_NAME, "1") - .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) - .execute().actionGet(); - refresh(); - - logger.info("--> First percolate request"); - PercolateResponse response = client().preparePercolate() - .setIndices(INDEX_NAME).setDocumentType("type") - .setSource(jsonBuilder().startObject().startObject("doc").field("field", "val").endObject().endObject()) - .execute().actionGet(); - assertMatchCount(response, 1L); - assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContaining("1")); - - NumShards numShards = getNumShards(INDEX_NAME); - - IndicesStatsResponse indicesResponse = client().admin().indices().prepareStats(INDEX_NAME).execute().actionGet(); - assertThat(indicesResponse.getTotal().getPercolatorCache().getNumQueries(), equalTo((long)numShards.dataCopies)); // number of copies - } - public void testPercolatingExistingDocs() throws Exception { client().admin().indices().prepareCreate(INDEX_NAME) .addMapping(TYPE_NAME, "query", "type=percolator") @@ -577,7 +553,7 @@ public class PercolatorIT extends ESIntegTestCase { refresh(); logger.info("--> Percolate existing doc with id 1"); - PercolateResponse response = client().preparePercolate() + PercolateResponse response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("1")) .execute().actionGet(); @@ -586,7 +562,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "4")); logger.info("--> Percolate existing doc with id 2"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("2")) .execute().actionGet(); @@ -595,7 +571,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("2", "4")); logger.info("--> Percolate existing doc with id 3"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("3")) .execute().actionGet(); @@ -604,7 +580,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "2", "3", "4")); logger.info("--> Percolate existing doc with id 4"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("4")) .execute().actionGet(); @@ -644,7 +620,7 @@ public class PercolatorIT extends ESIntegTestCase { refresh(); logger.info("--> Percolate existing doc with id 1"); - PercolateResponse response = client().preparePercolate() + PercolateResponse response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("1").routing("4")) 
.execute().actionGet(); @@ -653,7 +629,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "4")); logger.info("--> Percolate existing doc with id 2"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("2").routing("3")) .execute().actionGet(); @@ -662,7 +638,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("2", "4")); logger.info("--> Percolate existing doc with id 3"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("3").routing("2")) .execute().actionGet(); @@ -671,7 +647,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "2", "3", "4")); logger.info("--> Percolate existing doc with id 4"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("4").routing("1")) .execute().actionGet(); @@ -711,7 +687,7 @@ public class PercolatorIT extends ESIntegTestCase { refresh(); logger.info("--> Percolate existing doc with id 2 and version 1"); - PercolateResponse response = client().preparePercolate() + PercolateResponse response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("2").version(1L)) .execute().actionGet(); @@ -721,7 +697,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> Percolate existing doc with id 2 and version 2"); try { - client().preparePercolate() + preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("2").version(2L)) .execute().actionGet(); @@ -733,7 +709,7 @@ public class PercolatorIT extends ESIntegTestCase { client().prepareIndex(INDEX_NAME, "type", "2").setSource("field1", "c").execute().actionGet(); logger.info("--> Percolate existing doc with id 2 and version 2"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("2").version(2L)) .execute().actionGet(); @@ -757,7 +733,7 @@ public class PercolatorIT extends ESIntegTestCase { refresh(); logger.info("--> Percolate doc to index test1"); - PercolateResponse response = client().preparePercolate() + PercolateResponse response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject()) .execute().actionGet(); @@ -765,7 +741,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(response.getMatches(), arrayWithSize(5)); logger.info("--> Percolate doc to index test2"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME + "2").setDocumentType("type") .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject()) .execute().actionGet(); @@ 
-773,7 +749,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(response.getMatches(), arrayWithSize(5)); logger.info("--> Percolate doc to index test1 and test2"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME, INDEX_NAME + "2").setDocumentType("type") .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject()) .execute().actionGet(); @@ -781,7 +757,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(response.getMatches(), arrayWithSize(10)); logger.info("--> Percolate doc to index test2 and test3, with ignore missing"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME , INDEX_NAME + "3").setDocumentType("type") .setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject()) @@ -799,7 +775,7 @@ assertTrue(aliasesResponse.isAcknowledged()); logger.info("--> Percolate doc to my-alias1"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices("my-alias1").setDocumentType("type") .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject()) .execute().actionGet(); @@ -810,7 +786,7 @@ } logger.info("--> Percolate doc to my-alias2"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices("my-alias2").setDocumentType("type") .setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value").endObject().endObject()) .execute().actionGet(); @@ -840,7 +816,7 @@ public class PercolatorIT extends ESIntegTestCase { // Specifying only the document to percolate and no filter, sorting or aggs, the queries are retrieved from // memory directly. Otherwise we need to retrieve those queries from lucene to be able to execute filters, // aggregations and sorting on top of them. So this tests a different code execution path. - PercolateResponse response = client().preparePercolate() + PercolateResponse response = preparePercolate(client()) .setIndices("a") .setDocumentType("my-type") .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc("{}")) @@ -849,7 +825,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(response.getCount(), equalTo(1L)); assertThat(response.getMatches()[0].getId().string(), equalTo("1")); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices("b") .setDocumentType("my-type") .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc("{}")) @@ -859,7 +835,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(response.getMatches()[0].getId().string(), equalTo("2")); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices("c") .setDocumentType("my-type") .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc("{}")) @@ -868,7 +844,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(response.getCount(), equalTo(0L)); // Testing that the alias filter and the filter specified while percolating are both taken into account.
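// As the assertions around this hunk show, alias "a" resolves to query "1" only, alias "b" to query "2"
// only, and alias "c" to no query at all; the percolate filter set on the requests below is intersected
// with the alias filter, so the expected counts stay 1, 1 and 0 respectively.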
- response = client().preparePercolate() + response = preparePercolate(client()) .setIndices("a") .setDocumentType("my-type") .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc("{}")) @@ -878,7 +854,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(response.getCount(), equalTo(1L)); assertThat(response.getMatches()[0].getId().string(), equalTo("1")); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices("b") .setDocumentType("my-type") .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc("{}")) @@ -889,7 +865,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(response.getMatches()[0].getId().string(), equalTo("2")); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices("c") .setDocumentType("my-type") .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc("{}")) @@ -927,7 +903,7 @@ public class PercolatorIT extends ESIntegTestCase { refresh(); logger.info("--> Count percolate doc with field1=b"); - PercolateResponse response = client().preparePercolate() + PercolateResponse response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type").setOnlyCount(true) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "b").endObject())) .execute().actionGet(); @@ -935,7 +911,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(response.getMatches(), nullValue()); logger.info("--> Count percolate doc with field1=c"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type").setOnlyCount(true) .setPercolateDoc(docBuilder().setDoc(yamlBuilder().startObject().field("field1", "c").endObject())) .execute().actionGet(); @@ -943,7 +919,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(response.getMatches(), nullValue()); logger.info("--> Count percolate doc with field1=b c"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type").setOnlyCount(true) .setPercolateDoc(docBuilder().setDoc(smileBuilder().startObject().field("field1", "b c").endObject())) .execute().actionGet(); @@ -951,7 +927,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(response.getMatches(), nullValue()); logger.info("--> Count percolate doc with field1=d"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type").setOnlyCount(true) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "d").endObject())) .execute().actionGet(); @@ -960,7 +936,7 @@ public class PercolatorIT extends ESIntegTestCase { logger.info("--> Count percolate non existing doc"); try { - client().preparePercolate() + preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type").setOnlyCount(true) .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("5")) .execute().actionGet(); @@ -1001,7 +977,7 @@ public class PercolatorIT extends ESIntegTestCase { refresh(); logger.info("--> Count percolate existing doc with id 1"); - PercolateResponse response = client().preparePercolate() + PercolateResponse response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type").setOnlyCount(true) .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("1")) .execute().actionGet(); @@ -1009,7 +985,7 @@ public class PercolatorIT extends ESIntegTestCase { 
assertThat(response.getMatches(), nullValue()); logger.info("--> Count percolate existing doc with id 2"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type").setOnlyCount(true) .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("2")) .execute().actionGet(); @@ -1017,7 +993,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(response.getMatches(), nullValue()); logger.info("--> Count percolate existing doc with id 3"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type").setOnlyCount(true) .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("3")) .execute().actionGet(); @@ -1025,7 +1001,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(response.getMatches(), nullValue()); logger.info("--> Count percolate existing doc with id 4"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type").setOnlyCount(true) .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("4")) .execute().actionGet(); @@ -1053,7 +1029,7 @@ public class PercolatorIT extends ESIntegTestCase { refresh(); boolean onlyCount = randomBoolean(); - PercolateResponse response = client().preparePercolate() + PercolateResponse response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("my-type") .setOnlyCount(onlyCount) .setPercolateDoc(docBuilder().setDoc("field", "value")) @@ -1065,7 +1041,7 @@ public class PercolatorIT extends ESIntegTestCase { } int size = randomIntBetween(0, (int) totalQueries - 1); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("my-type") .setOnlyCount(onlyCount) .setPercolateDoc(docBuilder().setDoc("field", "value")) @@ -1082,7 +1058,7 @@ public class PercolatorIT extends ESIntegTestCase { int runs = randomIntBetween(3, 16); for (int i = 0; i < runs; i++) { onlyCount = randomBoolean(); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("my-type") .setOnlyCount(onlyCount) .setPercolateDoc(docBuilder().setDoc("field", "value")) @@ -1097,7 +1073,7 @@ public class PercolatorIT extends ESIntegTestCase { for (int i = 0; i < runs; i++) { onlyCount = randomBoolean(); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("my-type") .setOnlyCount(onlyCount) .setPercolateDoc(docBuilder().setDoc("field", "value")) @@ -1113,7 +1089,7 @@ public class PercolatorIT extends ESIntegTestCase { for (int i = 0; i < runs; i++) { onlyCount = randomBoolean(); size = randomIntBetween(0, (int) numQueriesPerLevel - 1); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("my-type") .setOnlyCount(onlyCount) .setSize(size) @@ -1156,7 +1132,7 @@ public class PercolatorIT extends ESIntegTestCase { int runs = randomInt(27); for (int i = 0; i < runs; i++) { int size = randomIntBetween(1, 50); - PercolateResponse response = client().preparePercolate().setIndices(INDEX_NAME).setDocumentType("my-type") + PercolateResponse response = preparePercolate(client()).setIndices(INDEX_NAME).setDocumentType("my-type") .setScore(true) .setSize(size) .setPercolateDoc(docBuilder().setDoc("field", "value")) @@ -1173,7 +1149,7 @@ public class PercolatorIT extends ESIntegTestCase { 
// Sort the queries by the score for (int i = 0; i < runs; i++) { int size = randomIntBetween(1, 10); - PercolateResponse response = client().preparePercolate().setIndices(INDEX_NAME).setDocumentType("my-type") + PercolateResponse response = preparePercolate(client()).setIndices(INDEX_NAME).setDocumentType("my-type") .setSortByScore(true) .setSize(size) .setPercolateDoc(docBuilder().setDoc("field", "value")) @@ -1196,7 +1172,7 @@ int value = usedValues.get(randomInt(usedValues.size() - 1)); NavigableSet<Integer> levels = controlMap.get(value); int size = randomIntBetween(1, levels.size()); - PercolateResponse response = client().preparePercolate().setIndices(INDEX_NAME).setDocumentType("my-type") + PercolateResponse response = preparePercolate(client()).setIndices(INDEX_NAME).setDocumentType("my-type") .setSortByScore(true) .setSize(size) .setPercolateDoc(docBuilder().setDoc("field", "value")) @@ -1232,7 +1208,7 @@ .execute().actionGet(); refresh(); - PercolateResponse response = client().preparePercolate().setIndices(INDEX_NAME).setDocumentType("my-type") + PercolateResponse response = preparePercolate(client()).setIndices(INDEX_NAME).setDocumentType("my-type") .setSortByScore(true) .setSize(2) .setPercolateDoc(docBuilder().setDoc("field", "value")) @@ -1251,7 +1227,7 @@ .get(); ensureGreen(); - PercolateResponse response = client().preparePercolate().setIndices(INDEX_NAME).setDocumentType("my-type") + PercolateResponse response = preparePercolate(client()).setIndices(INDEX_NAME).setDocumentType("my-type") .setSortByScore(true) .setSize(2) .setPercolateDoc(docBuilder().setDoc("field", "value")) @@ -1292,7 +1268,7 @@ refresh(); logger.info("--> Percolate doc with field1=The quick brown fox jumps over the lazy dog"); - PercolateResponse response = client().preparePercolate() + PercolateResponse response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setSize(5) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject())) @@ -1312,7 +1288,7 @@ assertThat(matches[4].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox jumps over the lazy dog")); logger.info("--> Query percolate doc with field1=The quick brown fox jumps over the lazy dog"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setSize(5) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject())) @@ -1338,7 +1314,7 @@ assertThat(matches[4].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox jumps over the lazy dog")); logger.info("--> Query percolate with score for doc with field1=The quick brown fox jumps over the lazy dog"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setSize(5) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject())) @@ -1370,7 +1346,7 @@
assertThat(matches[4].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox jumps over the lazy dog")); logger.info("--> Top percolate for doc with field1=The quick brown fox jumps over the lazy dog"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setSize(5) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject())) @@ -1402,7 +1378,7 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(matches[4].getHighlightFields().get("field1").fragments()[0].string(), equalTo("The quick brown fox jumps over the lazy dog")); logger.info("--> Top percolate for doc with field1=The quick brown fox jumps over the lazy dog"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setSize(5) .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject())) @@ -1440,7 +1416,7 @@ public class PercolatorIT extends ESIntegTestCase { refresh(); logger.info("--> Top percolate for doc with field1=The quick brown fox jumps over the lazy dog"); - response = client().preparePercolate() + response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setSize(5) .setGetRequest(Requests.getRequest(INDEX_NAME).type("type").id("1")) @@ -1489,7 +1465,7 @@ public class PercolatorIT extends ESIntegTestCase { .execute().actionGet(); refresh(); - PercolateResponse percolate = client().preparePercolate() + PercolateResponse percolate = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("doc") .setSource(jsonBuilder().startObject() .startObject("doc").field("message", "A new bonsai tree ").endObject() @@ -1501,9 +1477,9 @@ public class PercolatorIT extends ESIntegTestCase { public void testNestedPercolation() throws IOException { initNestedIndexAndPercolation(); - PercolateResponse response = client().preparePercolate().setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(getNotMatchingNestedDoc())).setIndices(INDEX_NAME).setDocumentType("company").get(); + PercolateResponse response = preparePercolate(client()).setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(getNotMatchingNestedDoc())).setIndices(INDEX_NAME).setDocumentType("company").get(); assertEquals(response.getMatches().length, 0); - response = client().preparePercolate().setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(getMatchingNestedDoc())).setIndices(INDEX_NAME).setDocumentType("company").get(); + response = preparePercolate(client()).setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(getMatchingNestedDoc())).setIndices(INDEX_NAME).setDocumentType("company").get(); assertEquals(response.getMatches().length, 1); assertEquals(response.getMatches()[0].getId().string(), "Q"); } @@ -1513,7 +1489,7 @@ public class PercolatorIT extends ESIntegTestCase { XContentBuilder doc = jsonBuilder(); doc.startObject(); doc.field("some_unnested_field", "value"); - PercolateResponse response = client().preparePercolate().setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(doc)).setIndices(INDEX_NAME).setDocumentType("company").get(); + PercolateResponse response = preparePercolate(client()).setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc(doc)).setIndices(INDEX_NAME).setDocumentType("company").get(); 
assertNoFailures(response); } @@ -1522,9 +1498,9 @@ public class PercolatorIT extends ESIntegTestCase { client().prepareIndex(INDEX_NAME, "company", "notmatching").setSource(getNotMatchingNestedDoc()).get(); client().prepareIndex(INDEX_NAME, "company", "matching").setSource(getMatchingNestedDoc()).get(); refresh(); - PercolateResponse response = client().preparePercolate().setGetRequest(Requests.getRequest(INDEX_NAME).type("company").id("notmatching")).setDocumentType("company").setIndices(INDEX_NAME).get(); + PercolateResponse response = preparePercolate(client()).setGetRequest(Requests.getRequest(INDEX_NAME).type("company").id("notmatching")).setDocumentType("company").setIndices(INDEX_NAME).get(); assertEquals(response.getMatches().length, 0); - response = client().preparePercolate().setGetRequest(Requests.getRequest(INDEX_NAME).type("company").id("matching")).setDocumentType("company").setIndices(INDEX_NAME).get(); + response = preparePercolate(client()).setGetRequest(Requests.getRequest(INDEX_NAME).type("company").id("matching")).setDocumentType("company").setIndices(INDEX_NAME).get(); assertEquals(response.getMatches().length, 1); assertEquals(response.getMatches()[0].getId().string(), "Q"); } @@ -1543,7 +1519,7 @@ public class PercolatorIT extends ESIntegTestCase { .get(); refresh(); - PercolateResponse response = client().preparePercolate() + PercolateResponse response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field", "value").endObject())) .setPercolateQuery(QueryBuilders.matchAllQuery()) @@ -1594,7 +1570,7 @@ public class PercolatorIT extends ESIntegTestCase { refresh(); logger.info("--> Percolate doc with field1=b"); - PercolateResponse response = client().preparePercolate() + PercolateResponse response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("my-type") .setPercolateDoc(docBuilder().setDoc("timestamp", System.currentTimeMillis())) .get(); @@ -1754,7 +1730,7 @@ public class PercolatorIT extends ESIntegTestCase { client().prepareIndex(INDEX_NAME, TYPE_NAME).setSource(q5).setId("q5").get(); client().prepareIndex(INDEX_NAME, TYPE_NAME).setSource(q6).setId("q6").get(); refresh(); - PercolateResponse response = client().preparePercolate() + PercolateResponse response = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("doc") .setPercolateDoc(docBuilder().setDoc(doc)) .get(); @@ -1767,12 +1743,12 @@ public class PercolatorIT extends ESIntegTestCase { assertTrue(expectedIds.remove(match.getId().string())); } assertTrue(expectedIds.isEmpty()); - response = client().preparePercolate().setOnlyCount(true) + response = preparePercolate(client()).setOnlyCount(true) .setIndices(INDEX_NAME).setDocumentType("doc") .setPercolateDoc(docBuilder().setDoc(doc)) .get(); assertMatchCount(response, 3L); - response = client().preparePercolate().setScore(randomBoolean()).setSortByScore(randomBoolean()).setOnlyCount(randomBoolean()).setSize(10).setPercolateQuery(QueryBuilders.termQuery("text", "foo")) + response = preparePercolate(client()).setScore(randomBoolean()).setSortByScore(randomBoolean()).setOnlyCount(randomBoolean()).setSize(10).setPercolateQuery(QueryBuilders.termQuery("text", "foo")) .setIndices(INDEX_NAME).setDocumentType("doc") .setPercolateDoc(docBuilder().setDoc(doc)) .get(); @@ -1791,7 +1767,7 @@ public class PercolatorIT extends ESIntegTestCase { .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", 
"value")).endObject()).get(); refresh(); logger.info("--> Percolate doc with field1=value"); - PercolateResponse response1 = client().preparePercolate() + PercolateResponse response1 = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "value").endObject())) .execute().actionGet(); @@ -1813,7 +1789,7 @@ public class PercolatorIT extends ESIntegTestCase { .get(); refresh(); - PercolateResponse response1 = client().preparePercolate() + PercolateResponse response1 = preparePercolate(client()) .setIndices(INDEX_NAME).setDocumentType("type") .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject() .startObject("location") @@ -1849,7 +1825,7 @@ public class PercolatorIT extends ESIntegTestCase { refresh(); // Just percolating a document that has a _parent field in its mapping should just work: - PercolateResponse response = client().preparePercolate() + PercolateResponse response = preparePercolate(client()) .setDocumentType("parent") .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc("field", "value")) .get(); @@ -1864,7 +1840,7 @@ public class PercolatorIT extends ESIntegTestCase { .get(); refresh(); - PercolateResponse response = client().preparePercolate() + PercolateResponse response = preparePercolate(client()) .setIndices(INDEX_NAME) .setDocumentType("type") .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc("{}")) diff --git a/core/src/test/java/org/elasticsearch/search/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java similarity index 85% rename from core/src/test/java/org/elasticsearch/search/percolator/PercolatorQuerySearchIT.java rename to modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java index 0065e46875a..aba81575b37 100644 --- a/core/src/test/java/org/elasticsearch/search/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.search.percolator; +package org.elasticsearch.percolator; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.bytes.BytesArray; @@ -24,17 +24,20 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.index.query.MatchPhraseQueryBuilder; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.query.MultiMatchQueryBuilder; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESSingleNodeTestCase; +import java.util.Collection; +import java.util.Collections; + import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.commonTermsQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; -import static org.elasticsearch.index.query.QueryBuilders.percolateQuery; import static org.elasticsearch.index.query.QueryBuilders.spanNearQuery; import static org.elasticsearch.index.query.QueryBuilders.spanNotQuery; import static org.elasticsearch.index.query.QueryBuilders.spanTermQuery; @@ -46,6 +49,11 @@ import static org.hamcrest.Matchers.startsWith; public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { + @Override + protected Collection<Class<? extends Plugin>> getPlugins() { + return Collections.singleton(PercolatorPlugin.class); + } + public void testPercolatorQuery() throws Exception { createIndex("test", client().admin().indices().prepareCreate("test") .addMapping("type", "field1", "type=keyword", "field2", "type=keyword") @@ -68,7 +76,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { BytesReference source = jsonBuilder().startObject().endObject().bytes(); logger.info("percolating empty doc"); SearchResponse response = client().prepareSearch() - .setQuery(percolateQuery("query", "type", source)) + .setQuery(new PercolateQueryBuilder("query", "type", source)) .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); @@ -76,7 +84,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { source = jsonBuilder().startObject().field("field1", "value").endObject().bytes(); logger.info("percolating doc with 1 field"); response = client().prepareSearch() - .setQuery(percolateQuery("query", "type", source)) + .setQuery(new PercolateQueryBuilder("query", "type", source)) .addSort("_uid", SortOrder.ASC) .get(); assertHitCount(response, 2); @@ -86,7 +94,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { source = jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject().bytes(); logger.info("percolating doc with 2 fields"); response = client().prepareSearch() - .setQuery(percolateQuery("query", "type", source)) + .setQuery(new PercolateQueryBuilder("query", "type", source)) .addSort("_uid", SortOrder.ASC) .get(); assertHitCount(response, 3); @@ -120,14 +128,14 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { logger.info("percolating empty doc"); SearchResponse response = client().prepareSearch() - .setQuery(percolateQuery("query", "type", "test", "type", "1")) + .setQuery(new PercolateQueryBuilder("query", "type",
"test", "type", "1", null, null, null)) .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); logger.info("percolating doc with 1 field"); response = client().prepareSearch() - .setQuery(percolateQuery("query", "type", "test", "type", "2")) + .setQuery(new PercolateQueryBuilder("query", "type", "test", "type", "2", null, null, null)) .addSort("_uid", SortOrder.ASC) .get(); assertHitCount(response, 2); @@ -136,7 +144,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { logger.info("percolating doc with 2 fields"); response = client().prepareSearch() - .setQuery(percolateQuery("query", "type", "test", "type", "3")) + .setQuery(new PercolateQueryBuilder("query", "type", "test", "type", "3", null, null, null)) .addSort("_uid", SortOrder.ASC) .get(); assertHitCount(response, 3); @@ -161,8 +169,8 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { client().prepareIndex("test", "queries", "3") .setSource(jsonBuilder().startObject().field("query", spanNearQuery(spanTermQuery("field1", "quick"), 0) - .clause(spanTermQuery("field1", "brown")) - .clause(spanTermQuery("field1", "fox")) + .addClause(spanTermQuery("field1", "brown")) + .addClause(spanTermQuery("field1", "fox")) .inOrder(true) ).endObject()) .get(); @@ -172,12 +180,12 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { .setSource(jsonBuilder().startObject().field("query", spanNotQuery( spanNearQuery(spanTermQuery("field1", "quick"), 0) - .clause(spanTermQuery("field1", "brown")) - .clause(spanTermQuery("field1", "fox")) + .addClause(spanTermQuery("field1", "brown")) + .addClause(spanTermQuery("field1", "fox")) .inOrder(true), spanNearQuery(spanTermQuery("field1", "the"), 0) - .clause(spanTermQuery("field1", "lazy")) - .clause(spanTermQuery("field1", "dog")) + .addClause(spanTermQuery("field1", "lazy")) + .addClause(spanTermQuery("field1", "dog")) .inOrder(true)).dist(2) ).endObject()) .get(); @@ -187,12 +195,12 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { .setSource(jsonBuilder().startObject().field("query", spanNotQuery( spanNearQuery(spanTermQuery("field1", "quick"), 0) - .clause(spanTermQuery("field1", "brown")) - .clause(spanTermQuery("field1", "fox")) + .addClause(spanTermQuery("field1", "brown")) + .addClause(spanTermQuery("field1", "fox")) .inOrder(true), spanNearQuery(spanTermQuery("field1", "the"), 0) - .clause(spanTermQuery("field1", "lazy")) - .clause(spanTermQuery("field1", "dog")) + .addClause(spanTermQuery("field1", "lazy")) + .addClause(spanTermQuery("field1", "dog")) .inOrder(true)).dist(3) ).endObject()) .get(); @@ -203,7 +211,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { .field("field2", "the quick brown fox falls down into the well") .endObject().bytes(); SearchResponse response = client().prepareSearch() - .setQuery(percolateQuery("query", "type", source)) + .setQuery(new PercolateQueryBuilder("query", "type", source)) .addSort("_uid", SortOrder.ASC) .get(); assertHitCount(response, 4); @@ -250,7 +258,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { .field("field1", "The quick brown fox jumps over the lazy dog") .endObject().bytes(); SearchResponse searchResponse = client().prepareSearch() - .setQuery(percolateQuery("query", "type", document)) + .setQuery(new PercolateQueryBuilder("query", "type", document)) .highlighter(new HighlightBuilder().field("field1")) .addSort("_uid", SortOrder.ASC) .get(); @@ -284,34 +292,30 @@ public class 
PercolatorQuerySearchIT extends ESSingleNodeTestCase { client().admin().indices().prepareRefresh().get(); SearchResponse response = client().prepareSearch().setQuery( - percolateQuery("query", "type", new BytesArray("{\"field\" : [\"brown\", \"fox\"]}")) + new PercolateQueryBuilder("query", "type", new BytesArray("{\"field\" : [\"brown\", \"fox\"]}")) ).get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } - public void testIllegalMappings() throws Exception { + public void testManyPercolatorFields() throws Exception { String queryFieldName = randomAsciiOfLength(8); - MapperParsingException e = expectThrows(MapperParsingException.class, () -> { - createIndex("test", client().admin().indices().prepareCreate("test") - .addMapping("doc_type", "field", "type=keyword") - .addMapping("query_type1", queryFieldName, "type=percolator") - .addMapping("query_type2", queryFieldName, "type=percolator", "second_query_field", "type=percolator") - .addMapping("query_type3", jsonBuilder().startObject().startObject("query_type3").startObject("properties") - .startObject("object_field") - .field("type", "object") - .startObject("properties") - .startObject(queryFieldName) - .field("type", "percolator") - .endObject() - .endObject() - .endObject() - .endObject().endObject()) - ); - }); - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - assertThat(e.getCause().getMessage(), startsWith("Up to one percolator field type is allowed per index")); + createIndex("test", client().admin().indices().prepareCreate("test") + .addMapping("doc_type", "field", "type=keyword") + .addMapping("query_type1", queryFieldName, "type=percolator") + .addMapping("query_type2", queryFieldName, "type=percolator", "second_query_field", "type=percolator") + .addMapping("query_type3", jsonBuilder().startObject().startObject("query_type3").startObject("properties") + .startObject("object_field") + .field("type", "object") + .startObject("properties") + .startObject(queryFieldName) + .field("type", "percolator") + .endObject() + .endObject() + .endObject() + .endObject().endObject()) + ); } public void testWithMultiplePercolatorFields() throws Exception { @@ -346,7 +350,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { BytesReference source = jsonBuilder().startObject().field("field", "value").endObject().bytes(); SearchResponse response = client().prepareSearch() - .setQuery(percolateQuery(queryFieldName, "doc_type", source)) + .setQuery(new PercolateQueryBuilder(queryFieldName, "doc_type", source)) .setIndices("test1") .get(); assertHitCount(response, 1); @@ -355,7 +359,7 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { assertThat(response.getHits().getAt(0).index(), equalTo("test1")); response = client().prepareSearch() - .setQuery(percolateQuery("object_field." + queryFieldName, "doc_type", source)) + .setQuery(new PercolateQueryBuilder("object_field." 
+ queryFieldName, "doc_type", source)) .setIndices("test2") .get(); assertHitCount(response, 1); diff --git a/plugins/delete-by-query/src/test/java/org/elasticsearch/plugin/deletebyquery/test/rest/DeleteByQueryRestIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorRestIT.java similarity index 87% rename from plugins/delete-by-query/src/test/java/org/elasticsearch/plugin/deletebyquery/test/rest/DeleteByQueryRestIT.java rename to modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorRestIT.java index 9674d541354..9f4eff64995 100644 --- a/plugins/delete-by-query/src/test/java/org/elasticsearch/plugin/deletebyquery/test/rest/DeleteByQueryRestIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorRestIT.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugin.deletebyquery.test.rest; +package org.elasticsearch.percolator; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; @@ -27,9 +27,8 @@ import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -public class DeleteByQueryRestIT extends ESRestTestCase { - - public DeleteByQueryRestIT(@Name("yaml") RestTestCandidate testCandidate) { +public class PercolatorRestIT extends ESRestTestCase { + public PercolatorRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } @@ -38,4 +37,3 @@ public class DeleteByQueryRestIT extends ESRestTestCase { return ESRestTestCase.createParameters(0, 1); } } - diff --git a/test/framework/src/main/java/org/elasticsearch/percolator/PercolatorTestUtil.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorTestUtil.java similarity index 57% rename from test/framework/src/main/java/org/elasticsearch/percolator/PercolatorTestUtil.java rename to modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorTestUtil.java index 671d7b8fd3b..32ea4c23ad4 100644 --- a/test/framework/src/main/java/org/elasticsearch/percolator/PercolatorTestUtil.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorTestUtil.java @@ -19,13 +19,33 @@ package org.elasticsearch.percolator; -import org.elasticsearch.action.percolate.PercolateResponse; +import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.Strings; import org.junit.Assert; +import static org.hamcrest.Matchers.greaterThan; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertVersionSerializable; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.formatShardStatus; + /** Static method pulled out of PercolatorIT, used by other tests */ public class PercolatorTestUtil extends Assert { - + + public static PercolateRequestBuilder preparePercolate(ElasticsearchClient client) { + return new PercolateRequestBuilder(client, PercolateAction.INSTANCE); + } + + public static MultiPercolateRequestBuilder prepareMultiPercolate(ElasticsearchClient client) { + return new MultiPercolateRequestBuilder(client, MultiPercolateAction.INSTANCE); + } + + public static void assertMatchCount(PercolateResponse percolateResponse, long expectedHitCount) { + if (percolateResponse.getCount() != expectedHitCount) { + fail("Count is " + percolateResponse.getCount() + " but " + expectedHitCount + " was expected. 
" + + formatShardStatus(percolateResponse)); + } + assertVersionSerializable(percolateResponse); + } + public static String[] convertFromTextArray(PercolateResponse.Match[] matches, String index) { if (matches.length == 0) { return Strings.EMPTY_ARRAY; diff --git a/core/src/test/resources/indices/percolator/bwc_index_2.0.0.zip b/modules/percolator/src/test/resources/indices/percolator/bwc_index_2.0.0.zip similarity index 100% rename from core/src/test/resources/indices/percolator/bwc_index_2.0.0.zip rename to modules/percolator/src/test/resources/indices/percolator/bwc_index_2.0.0.zip diff --git a/core/src/test/resources/org/elasticsearch/action/percolate/mpercolate1.json b/modules/percolator/src/test/resources/org/elasticsearch/percolator/mpercolate1.json similarity index 100% rename from core/src/test/resources/org/elasticsearch/action/percolate/mpercolate1.json rename to modules/percolator/src/test/resources/org/elasticsearch/percolator/mpercolate1.json diff --git a/core/src/test/resources/org/elasticsearch/action/percolate/mpercolate2.json b/modules/percolator/src/test/resources/org/elasticsearch/percolator/mpercolate2.json similarity index 100% rename from core/src/test/resources/org/elasticsearch/action/percolate/mpercolate2.json rename to modules/percolator/src/test/resources/org/elasticsearch/percolator/mpercolate2.json diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mpercolate/10_basic.yaml b/modules/percolator/src/test/resources/rest-api-spec/test/mpercolate/10_basic.yaml similarity index 100% rename from rest-api-spec/src/main/resources/rest-api-spec/test/mpercolate/10_basic.yaml rename to modules/percolator/src/test/resources/rest-api-spec/test/mpercolate/10_basic.yaml diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/15_new.yaml b/modules/percolator/src/test/resources/rest-api-spec/test/percolate/15_new.yaml similarity index 100% rename from rest-api-spec/src/main/resources/rest-api-spec/test/percolate/15_new.yaml rename to modules/percolator/src/test/resources/rest-api-spec/test/percolate/15_new.yaml diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/16_existing_doc.yaml b/modules/percolator/src/test/resources/rest-api-spec/test/percolate/16_existing_doc.yaml similarity index 100% rename from rest-api-spec/src/main/resources/rest-api-spec/test/percolate/16_existing_doc.yaml rename to modules/percolator/src/test/resources/rest-api-spec/test/percolate/16_existing_doc.yaml diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/17_empty.yaml b/modules/percolator/src/test/resources/rest-api-spec/test/percolate/17_empty.yaml similarity index 100% rename from rest-api-spec/src/main/resources/rest-api-spec/test/percolate/17_empty.yaml rename to modules/percolator/src/test/resources/rest-api-spec/test/percolate/17_empty.yaml diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/18_highligh_with_query.yaml b/modules/percolator/src/test/resources/rest-api-spec/test/percolate/18_highligh_with_query.yaml similarity index 100% rename from rest-api-spec/src/main/resources/rest-api-spec/test/percolate/18_highligh_with_query.yaml rename to modules/percolator/src/test/resources/rest-api-spec/test/percolate/18_highligh_with_query.yaml diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/percolate/19_nested.yaml b/modules/percolator/src/test/resources/rest-api-spec/test/percolate/19_nested.yaml similarity index 100% rename from 
rest-api-spec/src/main/resources/rest-api-spec/test/percolate/19_nested.yaml rename to modules/percolator/src/test/resources/rest-api-spec/test/percolate/19_nested.yaml diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java index ee2f5484737..0921abcb677 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java @@ -259,10 +259,6 @@ public abstract class AbstractAsyncBulkByScrollAction failures = new ArrayList(); Set destinationIndicesThisBatch = new HashSet<>(); @@ -291,6 +287,12 @@ public abstract class AbstractAsyncBulkByScrollActionit's results won't be visible until the index is refreshed. * */ -public class DeleteByQueryRequest extends AbstractBulkByScrollRequest { +public class DeleteByQueryRequest extends AbstractBulkByScrollRequest implements IndicesRequest { public DeleteByQueryRequest() { } @@ -76,4 +78,16 @@ public class DeleteByQueryRequest extends AbstractBulkByScrollRequest action.onBulkResponse(null)); + cancelTaskCase((DummyAbstractAsyncBulkByScrollAction action) -> + action.onBulkResponse(new BulkResponse(new BulkItemResponse[0], 0))); } public void testCancelBeforeStartNextScroll() throws Exception { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTestUtils.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTestUtils.java deleted file mode 100644 index fc6af19e6b3..00000000000 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTestUtils.java +++ /dev/null @@ -1,166 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.reindex; - -import org.elasticsearch.action.ListenableActionFuture; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; -import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; -import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo; -import org.elasticsearch.index.reindex.BulkByScrollTask.Status; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.ExecutableScript; -import org.elasticsearch.script.NativeScriptFactory; -import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptModule; -import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.test.ESIntegTestCase; - -import java.util.Arrays; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.concurrent.BrokenBarrierException; -import java.util.concurrent.CyclicBarrier; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; - -import static java.util.Collections.emptyMap; -import static org.elasticsearch.test.ESIntegTestCase.client; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.hasSize; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThat; - -/** - * Utilities for testing reindex and update-by-query cancellation. This whole class isn't thread safe. Luckily we run our tests in separate - * jvms. - */ -public class CancelTestUtils { - public static Collection> nodePlugins() { - return Arrays.asList(ReindexPlugin.class, StickyScriptPlugin.class); - } - - private static final CyclicBarrier barrier = new CyclicBarrier(2); - - public static , - Builder extends AbstractBulkIndexByScrollRequestBuilder> - BulkIndexByScrollResponse testCancel(ESIntegTestCase test, Builder request, String actionToCancel) throws Exception { - - test.indexRandom(true, client().prepareIndex("source", "test", "1").setSource("foo", "a"), - client().prepareIndex("source", "test", "2").setSource("foo", "a")); - - request.source("source").script(new Script("sticky", ScriptType.INLINE, "native", emptyMap())); - request.source().setSize(1); - ListenableActionFuture response = request.execute(); - - // Wait until the script is on the first document. - barrier.await(30, TimeUnit.SECONDS); - - // Let just one document through. - barrier.await(30, TimeUnit.SECONDS); - - // Wait until the script is on the second document. - barrier.await(30, TimeUnit.SECONDS); - - // Status should show running - ListTasksResponse tasksList = client().admin().cluster().prepareListTasks().setActions(actionToCancel).setDetailed(true).get(); - assertThat(tasksList.getNodeFailures(), empty()); - assertThat(tasksList.getTaskFailures(), empty()); - assertThat(tasksList.getTasks(), hasSize(1)); - BulkByScrollTask.Status status = (Status) tasksList.getTasks().get(0).getStatus(); - assertNull(status.getReasonCancelled()); - - // Cancel the request while the script is running. This will prevent the request from being sent at all. - List cancelledTasks = client().admin().cluster().prepareCancelTasks().setActions(actionToCancel).get().getTasks(); - assertThat(cancelledTasks, hasSize(1)); - - // The status should now show canceled. The request will still be in the list because the script is still blocked. 
- tasksList = client().admin().cluster().prepareListTasks().setActions(actionToCancel).setDetailed(true).get(); - assertThat(tasksList.getNodeFailures(), empty()); - assertThat(tasksList.getTaskFailures(), empty()); - assertThat(tasksList.getTasks(), hasSize(1)); - status = (Status) tasksList.getTasks().get(0).getStatus(); - assertEquals(CancelTasksRequest.DEFAULT_REASON, status.getReasonCancelled()); - - // Now let the next document through. It won't be sent because the request is cancelled but we need to unblock the script. - barrier.await(); - - // Now we can just wait on the request and make sure it was actually cancelled half way through. - return response.get(); - } - - public static class StickyScriptPlugin extends Plugin { - @Override - public String name() { - return "sticky-script"; - } - - @Override - public String description() { - return "installs a script that \"sticks\" when it runs for testing reindex"; - } - - public void onModule(ScriptModule module) { - module.registerScript("sticky", StickyScriptFactory.class); - } - } - - public static class StickyScriptFactory implements NativeScriptFactory { - @Override - public ExecutableScript newScript(Map params) { - return new ExecutableScript() { - private Map source; - @Override - @SuppressWarnings("unchecked") // Safe because _ctx always has this shape - public void setNextVar(String name, Object value) { - if ("ctx".equals(name)) { - Map ctx = (Map) value; - source = (Map) ctx.get("_source"); - } else { - throw new IllegalArgumentException("Unexpected var: " + name); - } - } - - @Override - public Object run() { - try { - // Tell the test we've started a document. - barrier.await(30, TimeUnit.SECONDS); - - // Wait for the test to tell us to proceed. - barrier.await(30, TimeUnit.SECONDS); - - // Make some change to the source so that update-by-query tests can make sure only one document was changed. - source.put("giraffes", "giraffes"); - return null; - } catch (InterruptedException | BrokenBarrierException | TimeoutException e) { - throw new RuntimeException(e); - } - } - }; - } - - @Override - public boolean needsScores() { - return false; - } - } -} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTests.java new file mode 100644 index 00000000000..e2cc5b43f0a --- /dev/null +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTests.java @@ -0,0 +1,240 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.action.ListenableActionFuture; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo; +import org.elasticsearch.action.ingest.DeletePipelineRequest; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.index.IndexModule; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.engine.Engine.Operation.Origin; +import org.elasticsearch.index.shard.IndexingOperationListener; +import org.elasticsearch.plugins.Plugin; +import org.junit.BeforeClass; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.emptyIterable; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +/** + * Test that you can actually cancel a reindex/update-by-query/delete-by-query request and all the plumbing works. Doesn't test all of the + * different cancellation places - that is the responsibility of {@link AsyncBulkByScrollActionTests} which has more precise control to + * simulate failures but does not exercise important portions of the stack like transport and task management.
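+ * The tests work by installing an {@link IndexingOperationListener} (through a test plugin) that parks every primary-shard
+ * operation for the test type on a semaphore, so a request can be cancelled while it is verifiably still in flight.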
+ */ +public class CancelTests extends ReindexTestCase { + + protected static final String INDEX = "reindex-cancel-index"; + protected static final String TYPE = "reindex-cancel-type"; + + private static final int MIN_OPERATIONS = 2; + private static final int BLOCKING_OPERATIONS = 1; + + // Semaphore used to allow & block indexing operations during the test + private static final Semaphore ALLOWED_OPERATIONS = new Semaphore(0); + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + Collection<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins()); + plugins.add(ReindexCancellationPlugin.class); + return plugins; + } + + @BeforeClass + public static void clearAllowedOperations() { + ALLOWED_OPERATIONS.drainPermits(); + } + + /** + * Executes the cancellation test + */ + private void testCancel(String action, + AbstractBulkByScrollRequestBuilder builder, + CancelAssertion assertion) throws Exception { + + createIndex(INDEX); + + // Total number of documents created for this test (~10 per primary shard) + int numDocs = getNumShards(INDEX).numPrimaries * 10; + ALLOWED_OPERATIONS.release(numDocs); + + indexRandom(true, false, true, IntStream.range(0, numDocs) + .mapToObj(i -> client().prepareIndex(INDEX, TYPE, String.valueOf(i)).setSource("n", i)) + .collect(Collectors.toList())); + + // Checks that all documents have been indexed and correctly counted + assertHitCount(client().prepareSearch(INDEX).setSize(0).get(), numDocs); + assertThat(ALLOWED_OPERATIONS.drainPermits(), equalTo(0)); + + // Scroll 1 by 1 so that cancellation is easier to control + builder.source().setSize(1); + + // Pick a random number of documents to modify, then release one permit fewer + // so that the last operation stays blocked + int numModifiedDocs = randomIntBetween(MIN_OPERATIONS, numDocs); + ALLOWED_OPERATIONS.release(numModifiedDocs - BLOCKING_OPERATIONS); + + // Now execute the reindex action... + ListenableActionFuture<BulkIndexByScrollResponse> future = builder.execute(); + + // ... and wait for the indexing operation listeners to block + awaitBusy(() -> ALLOWED_OPERATIONS.hasQueuedThreads() && ALLOWED_OPERATIONS.availablePermits() == 0); + + // Status should show the task running + ListTasksResponse tasksList = client().admin().cluster().prepareListTasks().setActions(action).setDetailed(true).get(); + assertThat(tasksList.getNodeFailures(), empty()); + assertThat(tasksList.getTaskFailures(), empty()); + assertThat(tasksList.getTasks(), hasSize(1)); + BulkByScrollTask.Status status = (BulkByScrollTask.Status) tasksList.getTasks().get(0).getStatus(); + assertNull(status.getReasonCancelled()); + + // Cancel the request while the reindex action is blocked by the indexing operation listeners. + // This will prevent further requests from being sent. + List<TaskInfo> cancelledTasks = client().admin().cluster().prepareCancelTasks().setActions(action).get().getTasks(); + assertThat(cancelledTasks, hasSize(1)); + + // The status should now show canceled. The request will still be in the list because it is still blocked.
+ tasksList = client().admin().cluster().prepareListTasks().setActions(action).setDetailed(true).get(); + assertThat(tasksList.getNodeFailures(), empty()); + assertThat(tasksList.getTaskFailures(), empty()); + assertThat(tasksList.getTasks(), hasSize(1)); + status = (BulkByScrollTask.Status) tasksList.getTasks().get(0).getStatus(); + assertEquals(CancelTasksRequest.DEFAULT_REASON, status.getReasonCancelled()); + + // Unblock the last operation + ALLOWED_OPERATIONS.release(BLOCKING_OPERATIONS); + + // Checks that no more operations are executed + assertBusy(() -> assertTrue(ALLOWED_OPERATIONS.availablePermits() == 0 && ALLOWED_OPERATIONS.getQueueLength() == 0)); + + // And check the status of the response + BulkIndexByScrollResponse response = future.get(); + assertThat(response.getReasonCancelled(), equalTo("by user request")); + assertThat(response.getIndexingFailures(), emptyIterable()); + assertThat(response.getSearchFailures(), emptyIterable()); + + flushAndRefresh(INDEX); + assertion.assertThat(response, numDocs, numModifiedDocs); + } + + public void testReindexCancel() throws Exception { + testCancel(ReindexAction.NAME, reindex().source(INDEX).destination("dest", TYPE), (response, total, modified) -> { + assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request"))); + + refresh("dest"); + assertHitCount(client().prepareSearch("dest").setTypes(TYPE).setSize(0).get(), modified); + }); + } + + public void testUpdateByQueryCancel() throws Exception { + BytesReference pipeline = new BytesArray("{\n" + + " \"description\" : \"sets updated to true\",\n" + + " \"processors\" : [ {\n" + + " \"set\" : {\n" + + " \"field\": \"updated\",\n" + + " \"value\": true" + + " }\n" + + " } ]\n" + + "}"); + assertAcked(client().admin().cluster().preparePutPipeline("set-foo", pipeline).get()); + + testCancel(UpdateByQueryAction.NAME, updateByQuery().setPipeline("set-foo").source(INDEX), (response, total, modified) -> { + assertThat(response, matcher().updated(modified).reasonCancelled(equalTo("by user request"))); + assertHitCount(client().prepareSearch(INDEX).setSize(0).setQuery(termQuery("updated", true)).get(), modified); + }); + + assertAcked(client().admin().cluster().deletePipeline(new DeletePipelineRequest("set-foo")).get()); + } + + public void testDeleteByQueryCancel() throws Exception { + testCancel(DeleteByQueryAction.NAME, deleteByQuery().source(INDEX), (response, total, modified) -> { + assertThat(response, matcher().deleted(modified).reasonCancelled(equalTo("by user request"))); + assertHitCount(client().prepareSearch(INDEX).setSize(0).get(), total - modified); + }); + } + + /** + * {@link CancelAssertion} is used to check the result of the cancel test. 
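+ * Implementations receive the response along with the total and modified document counts, so each action's test can assert both on
+ * the response matcher and on a follow-up search.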
+ */ + private interface CancelAssertion { + void assertThat(BulkIndexByScrollResponse response, int total, int modified); + } + + public static class ReindexCancellationPlugin extends Plugin { + + @Override + public String name() { + return "reindex-cancellation"; + } + + @Override + public String description() { + return "See " + CancelTests.class.getName() + " documentation"; + } + + @Override + public void onIndexModule(IndexModule indexModule) { + indexModule.addIndexOperationListener(new BlockingDeleteListener()); + } + } + + public static class BlockingDeleteListener implements IndexingOperationListener { + + @Override + public Engine.Index preIndex(Engine.Index index) { + return preCheck(index, index.type()); + } + + @Override + public Engine.Delete preDelete(Engine.Delete delete) { + return preCheck(delete, delete.type()); + } + + private <T extends Engine.Operation> T preCheck(T operation, String type) { + if ((TYPE.equals(type) == false) || (operation.origin() != Origin.PRIMARY)) { + return operation; + } + + try { + if (ALLOWED_OPERATIONS.tryAcquire(30, TimeUnit.SECONDS)) { + return operation; + } + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + throw new IllegalStateException("Something went wrong"); + } + } +} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryCancelTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryCancelTests.java deleted file mode 100644 index 6007b646429..00000000000 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryCancelTests.java +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - -package org.elasticsearch.index.reindex; - -import org.elasticsearch.action.ListenableActionFuture; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; -import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; -import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo; -import org.elasticsearch.common.util.concurrent.CountDown; -import org.elasticsearch.index.IndexModule; -import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.shard.IndexingOperationListener; -import org.elasticsearch.plugins.Plugin; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.concurrent.BrokenBarrierException; -import java.util.concurrent.CyclicBarrier; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; - -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.lessThanOrEqualTo; - -/** - * Tests that you can actually cancel a delete-by-query request and all the plumbing works. Doesn't test all of the different cancellation - * places - that is the responsibility of {@link AsyncBulkByScrollActionTests} which have more precise control to simulate failures but do - * not exercise important portion of the stack like transport and task management. - */ -public class DeleteByQueryCancelTests extends ReindexTestCase { - - private static final String INDEX = "test-delete-by-query"; - private static final String TYPE = "test"; - - private static final int MAX_DELETIONS = 10; - private static final CyclicBarrier barrier = new CyclicBarrier(2); - - @Override - protected int numberOfShards() { - // Only 1 shard and no replica so that test execution - // can be easily controlled within a {@link IndexingOperationListener#preDelete} - return 1; - } - - @Override - protected int numberOfReplicas() { - // Only 1 shard and no replica so that test execution - // can be easily controlled within a {@link IndexingOperationListener#preDelete} - return 0; - } - - @Override - protected Collection> nodePlugins() { - Collection> plugins = new ArrayList<>(super.nodePlugins()); - plugins.add(DeleteByQueryCancellationPlugin.class); - return plugins; - } - - public void testCancel() throws Exception { - createIndex(INDEX); - - int totalNumShards = getNumShards(INDEX).totalNumShards; - - // Number of documents to be deleted in this test - final int nbDocsToDelete = totalNumShards * MAX_DELETIONS; - - // Total number of documents that will be created in this test - final int nbDocs = nbDocsToDelete * randomIntBetween(1, 5); - for (int i = 0; i < nbDocs; i++) { - indexRandom(false, client().prepareIndex(INDEX, TYPE, String.valueOf(i)).setSource("n", i)); - } - - refresh(INDEX); - assertHitCount(client().prepareSearch(INDEX).setSize(0).get(), nbDocs); - - // Executes the delete by query; each shard will block after MAX_DELETIONS - DeleteByQueryRequestBuilder deleteByQuery = deleteByQuery().source("_all"); - deleteByQuery.source().setSize(1); - - ListenableActionFuture future = deleteByQuery.execute(); - - // Waits for the indexing operation listener to block - barrier.await(30, TimeUnit.SECONDS); - - // Status should show running - ListTasksResponse tasksList = client().admin().cluster().prepareListTasks() - 
.setActions(DeleteByQueryAction.NAME).setDetailed(true).get(); - assertThat(tasksList.getNodeFailures(), empty()); - assertThat(tasksList.getTaskFailures(), empty()); - assertThat(tasksList.getTasks(), hasSize(1)); - BulkByScrollTask.Status status = (BulkByScrollTask.Status) tasksList.getTasks().get(0).getStatus(); - assertNull(status.getReasonCancelled()); - - // Cancel the request while the deletions are blocked. This will prevent further deletions requests from being sent. - List cancelledTasks = client().admin().cluster().prepareCancelTasks() - .setActions(DeleteByQueryAction.NAME).get().getTasks(); - assertThat(cancelledTasks, hasSize(1)); - - // The status should now show canceled. The request will still be in the list because the script is still blocked. - tasksList = client().admin().cluster().prepareListTasks().setActions(DeleteByQueryAction.NAME).setDetailed(true).get(); - assertThat(tasksList.getNodeFailures(), empty()); - assertThat(tasksList.getTaskFailures(), empty()); - assertThat(tasksList.getTasks(), hasSize(1)); - status = (BulkByScrollTask.Status) tasksList.getTasks().get(0).getStatus(); - assertEquals(CancelTasksRequest.DEFAULT_REASON, status.getReasonCancelled()); - - // Now unblock the listener so that it can proceed - barrier.await(); - - // And check the status of the response - BulkIndexByScrollResponse response = future.get(); - assertThat(response, matcher() - .deleted(lessThanOrEqualTo((long) MAX_DELETIONS)).batches(MAX_DELETIONS).reasonCancelled(equalTo("by user request"))); - } - - - public static class DeleteByQueryCancellationPlugin extends Plugin { - - @Override - public String name() { - return "delete-by-query-cancellation"; - } - - @Override - public String description() { - return "See " + DeleteByQueryCancellationPlugin.class.getName(); - } - - @Override - public void onIndexModule(IndexModule indexModule) { - indexModule.addIndexOperationListener(new BlockingDeleteListener()); - } - } - - /** - * A {@link IndexingOperationListener} that allows a given number of documents to be deleted - * and then blocks until it is notified to proceed. - */ - public static class BlockingDeleteListener implements IndexingOperationListener { - - private final CountDown blockAfter = new CountDown(MAX_DELETIONS); - - @Override - public Engine.Delete preDelete(Engine.Delete delete) { - if (blockAfter.isCountedDown() || (TYPE.equals(delete.type()) == false)) { - return delete; - } - - if (blockAfter.countDown()) { - try { - // Tell the test we've deleted enough documents. - barrier.await(30, TimeUnit.SECONDS); - - // Wait for the test to tell us to proceed. - barrier.await(30, TimeUnit.SECONDS); - } catch (InterruptedException | BrokenBarrierException | TimeoutException e) { - throw new RuntimeException(e); - } - } - return delete; - } - } -} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexCancelTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexCancelTests.java deleted file mode 100644 index f9dde6045a6..00000000000 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexCancelTests.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.reindex; - -import org.elasticsearch.plugins.Plugin; - -import java.util.Collection; - -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.hamcrest.Matchers.equalTo; - -/** - * Tests that you can actually cancel a reindex request and all the plumbing works. Doesn't test all of the different cancellation places - - * that is the responsibility of {@link AsyncBulkByScrollActionTests} which have more precise control to simulate failures but do not - * exercise important portion of the stack like transport and task management. - */ -public class ReindexCancelTests extends ReindexTestCase { - public void testCancel() throws Exception { - BulkIndexByScrollResponse response = CancelTestUtils.testCancel(this, reindex().destination("dest", "test"), ReindexAction.NAME); - - assertThat(response, matcher().created(1).reasonCancelled(equalTo("by user request"))); - refresh("dest"); - assertHitCount(client().prepareSearch("dest").setSize(0).get(), 1); - } - - @Override - protected int numberOfShards() { - return 1; - } - - @Override - protected Collection> nodePlugins() { - return CancelTestUtils.nodePlugins(); - } -} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexTestCase.java index f4a777a1973..97d0c05083e 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexTestCase.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexTestCase.java @@ -51,7 +51,7 @@ public abstract class ReindexTestCase extends ESIntegTestCase { return RethrottleAction.INSTANCE.newRequestBuilder(client()); } - protected static BulkIndexByScrollResponseMatcher matcher() { + public static BulkIndexByScrollResponseMatcher matcher() { return new BulkIndexByScrollResponseMatcher(); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java index 0b557898552..478f7707dfd 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java @@ -20,136 +20,164 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.ListenableActionFuture; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.bulk.Retry; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.MockSearchService; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import 
org.junit.After; +import org.junit.Before; import java.util.ArrayList; import java.util.Collection; import java.util.List; -import java.util.function.IntFunction; +import java.util.concurrent.CyclicBarrier; -import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.index.reindex.ReindexTestCase.matcher; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; /** * Integration test for retry behavior. Useful because retrying relies on the way that the rest of Elasticsearch throws exceptions and unit * tests won't verify that. */ -public class RetryTests extends ReindexTestCase { - /** - * The number of concurrent requests to test. - */ - private static final int CONCURRENT = 12; - /** - * Enough docs that the requests will likely step on each other. - */ - private static final int DOC_COUNT = 200; +public class RetryTests extends ESSingleNodeTestCase { + private static final int DOC_COUNT = 20; + + private List<CyclicBarrier> blockedExecutors = new ArrayList<>(); + + @Override + protected Collection<Class<? extends Plugin>> getPlugins() { + return pluginList(ReindexPlugin.class); + } /** * Lower the queue sizes to be small enough that both bulk and searches will time out and have to be retried. */ @Override - protected Settings nodeSettings(int nodeOrdinal) { - Settings.Builder settings = Settings.builder().put(super.nodeSettings(nodeOrdinal)); - settings.put("threadpool.bulk.queue_size", 1); + protected Settings nodeSettings() { + Settings.Builder settings = Settings.builder().put(super.nodeSettings()); + // Use pools of size 1 so we can block them settings.put("threadpool.bulk.size", 1); - settings.put("threadpool.search.queue_size", 1); settings.put("threadpool.search.size", 1); + // Use queues of size 1 because size 0 is broken and because search requests need the queue to function + settings.put("threadpool.bulk.queue_size", 1); + settings.put("threadpool.search.queue_size", 1); return settings.build(); } - /** - * Disable search context leak detection because we expect leaks when there is an {@link EsRejectedExecutionException} queueing the - * reduce phase. - */ - @Override - protected Collection<Class<? extends Plugin>> getMockPlugins() { - List<Class<? extends Plugin>> mockPlugins = new ArrayList<>(); - for (Class<? extends Plugin> plugin: super.getMockPlugins()) { - if (plugin.equals(MockSearchService.TestPlugin.class)) { - continue; - } - mockPlugins.add(plugin); + @Before + public void setupSourceIndex() throws Exception { + createIndex("source"); + // Build the test data. Don't use indexRandom because that won't work consistently with such small thread pools.
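+ // With only one bulk thread and a one-slot queue, plain indexing requests can be rejected outright, so retry on
+ // EsRejectedExecutionException with exponential backoff until every doc makes it in.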
+ BulkRequestBuilder bulk = client().prepareBulk(); + for (int i = 0; i < DOC_COUNT; i++) { + bulk.add(client().prepareIndex("source", "test").setSource("foo", "bar " + i)); + } + Retry retry = Retry.on(EsRejectedExecutionException.class).policy(BackoffPolicy.exponentialBackoff()); + BulkResponse response = retry.withSyncBackoff(client(), bulk.request()); + assertFalse(response.buildFailureMessage(), response.hasFailures()); + client().admin().indices().prepareRefresh("source").get(); + } + + @After + public void forceUnblockAllExecutors() { + for (CyclicBarrier barrier: blockedExecutors) { + barrier.reset(); } - return mockPlugins; } public void testReindex() throws Exception { - setupSourceIndex("source"); - testCase(true, i -> reindex().source("source").destination("dest" + i)); + testCase(ReindexAction.NAME, ReindexAction.INSTANCE.newRequestBuilder(client()).source("source").destination("dest"), + matcher().created(DOC_COUNT)); } public void testUpdateByQuery() throws Exception { - for (int i = 0; i < CONCURRENT; i++) { - setupSourceIndex("source" + i); - } - testCase(false, i -> updateByQuery().source("source" + i)); + testCase(UpdateByQueryAction.NAME, UpdateByQueryAction.INSTANCE.newRequestBuilder(client()).source("source"), + matcher().updated(DOC_COUNT)); } - private void testCase(boolean expectCreated, IntFunction> requestBuilder) - throws Exception { - List> futures = new ArrayList<>(CONCURRENT); - for (int i = 0; i < CONCURRENT; i++) { - AbstractBulkIndexByScrollRequestBuilder request = requestBuilder.apply(i); - // Make sure we use more than one batch so we get the full reindex behavior - request.source().setSize(DOC_COUNT / randomIntBetween(2, 10)); - // Use a low, random initial wait so we are unlikely collide with others retrying. - request.setRetryBackoffInitialTime(timeValueMillis(randomIntBetween(10, 300))); - futures.add(request.execute()); - } - - // Finish all the requests - List responses = new ArrayList<>(CONCURRENT); - for (ListenableActionFuture future : futures) { - responses.add(future.get()); - } - - // Now check them - long bulkRetries = 0; - long searchRetries = 0; - BulkIndexByScrollResponseMatcher matcher = matcher(); - if (expectCreated) { - matcher.created(DOC_COUNT); - } else { - matcher.updated(DOC_COUNT); - } - for (BulkIndexByScrollResponse response : responses) { - assertThat(response, matcher); - bulkRetries += response.getBulkRetries(); - searchRetries += response.getSearchRetries(); - } - - // We expect at least one retry or this test isn't very useful - assertThat(bulkRetries, greaterThan(0L)); - assertThat(searchRetries, greaterThan(0L)); + public void testDeleteByQuery() throws Exception { + testCase(DeleteByQueryAction.NAME, DeleteByQueryAction.INSTANCE.newRequestBuilder(client()).source("source"), + matcher().deleted(DOC_COUNT)); } - private void setupSourceIndex(String name) { - try { - // Build the test index with a single shard so we can be sure that a search request *can* complete with the one thread - assertAcked(client().admin().indices().prepareCreate(name).setSettings( - "index.number_of_shards", 1, - "index.number_of_replicas", 0).get()); - waitForRelocation(ClusterHealthStatus.GREEN); - // Build the test data. Don't use indexRandom because that won't work consistently with such small thread pools. 
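Aside: the testCase and blockExecutor methods below drive retries by wedging a pool's only worker on a CyclicBarrier and stuffing its one-slot queue, so the node has no choice but to throw EsRejectedExecutionException. Reduced to plain java.util.concurrent with no Elasticsearch types (all names here are illustrative):

    import java.util.concurrent.ArrayBlockingQueue;
    import java.util.concurrent.CyclicBarrier;
    import java.util.concurrent.RejectedExecutionException;
    import java.util.concurrent.ThreadPoolExecutor;
    import java.util.concurrent.TimeUnit;

    public class BlockedExecutorSketch {
        public static void main(String[] args) throws Exception {
            // One worker and a one-slot queue: the same shape as the nodeSettings above.
            ThreadPoolExecutor executor =
                    new ThreadPoolExecutor(1, 1, 0, TimeUnit.SECONDS, new ArrayBlockingQueue<>(1));
            CyclicBarrier barrier = new CyclicBarrier(2);
            executor.execute(() -> {
                try {
                    barrier.await(); // rendezvous 1: tell the caller the worker is occupied
                    barrier.await(); // rendezvous 2: stay parked until the caller unblocks us
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
            });
            barrier.await();             // the only worker is now busy
            executor.execute(() -> {});  // a noop fills the single queue slot
            try {
                executor.execute(() -> {}); // no free thread, no queue space...
            } catch (RejectedExecutionException e) {
                System.out.println("rejected: " + e); // ...exactly the rejection the test waits for
            }
            barrier.await();             // release the worker again
            executor.shutdown();
        }
    }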
- BulkRequestBuilder bulk = client().prepareBulk(); - for (int i = 0; i < DOC_COUNT; i++) { - bulk.add(client().prepareIndex(name, "test").setSource("foo", "bar " + i)); + private void testCase(String action, AbstractBulkByScrollRequestBuilder request, + BulkIndexByScrollResponseMatcher matcher) throws Exception { + logger.info("Blocking search"); + CyclicBarrier initialSearchBlock = blockExecutor(ThreadPool.Names.SEARCH); + + // Make sure we use more than one batch so we have to scroll + request.source().setSize(DOC_COUNT / randomIntBetween(2, 10)); + + logger.info("Starting request"); + ListenableActionFuture responseListener = request.execute(); + + logger.info("Waiting for search rejections on the initial search"); + assertBusy(() -> assertThat(taskStatus(action).getSearchRetries(), greaterThan(0L))); + + logger.info("Blocking bulk and unblocking search so we start to get bulk rejections"); + CyclicBarrier bulkBlock = blockExecutor(ThreadPool.Names.BULK); + initialSearchBlock.await(); + + logger.info("Waiting for bulk rejections"); + assertBusy(() -> assertThat(taskStatus(action).getBulkRetries(), greaterThan(0L))); + + // Keep a copy of the current number of search rejections so we can assert that we get more when we block the scroll + long initialSearchRejections = taskStatus(action).getSearchRetries(); + + logger.info("Blocking search and unblocking bulk so we should get search rejections for the scroll"); + CyclicBarrier scrollBlock = blockExecutor(ThreadPool.Names.SEARCH); + bulkBlock.await(); + + logger.info("Waiting for search rejections for the scroll"); + assertBusy(() -> assertThat(taskStatus(action).getSearchRetries(), greaterThan(initialSearchRejections))); + + logger.info("Unblocking the scroll"); + scrollBlock.await(); + + logger.info("Waiting for the request to finish"); + BulkIndexByScrollResponse response = responseListener.get(); + assertThat(response, matcher); + assertThat(response.getBulkRetries(), greaterThan(0L)); + assertThat(response.getSearchRetries(), greaterThan(initialSearchRejections)); + } + + /** + * Blocks the named executor by getting its only thread running a task blocked on a CyclicBarrier and fills the queue with a noop task. + * So requests to use this queue should get {@link EsRejectedExecutionException}s. + */ + private CyclicBarrier blockExecutor(String name) throws Exception { + ThreadPool threadPool = getInstanceFromNode(ThreadPool.class); + CyclicBarrier barrier = new CyclicBarrier(2); + logger.info("Blocking the [{}] executor", name); + threadPool.executor(name).execute(() -> { + try { + threadPool.executor(name).execute(() -> {}); + barrier.await(); + logger.info("Blocked the [{}] executor", name); + barrier.await(); + logger.info("Unblocking the [{}] executor", name); + } catch (Exception e) { + throw new RuntimeException(e); } - Retry retry = Retry.on(EsRejectedExecutionException.class).policy(BackoffPolicy.exponentialBackoff()); - BulkResponse response = retry.withSyncBackoff(client(), bulk.request()); - assertFalse(response.buildFailureMessage(), response.hasFailures()); - refresh(name); - } catch (Exception e) { - throw new RuntimeException(e); - } + }); + barrier.await(); + blockedExecutors.add(barrier); + return barrier; + } + + /** + * Fetch the status for a task of type "action". Fails if there isn't exactly one of that type of task running.
+ */ + private BulkByScrollTask.Status taskStatus(String action) { + ListTasksResponse response = client().admin().cluster().prepareListTasks().setActions(action).setDetailed(true).get(); + assertThat(response.getTasks(), hasSize(1)); + return (BulkByScrollTask.Status) response.getTasks().get(0).getStatus(); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryCancelTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryCancelTests.java deleted file mode 100644 index 4cb859c0017..00000000000 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryCancelTests.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.reindex; - -import org.elasticsearch.plugins.Plugin; - -import java.util.Collection; - -import static org.elasticsearch.index.query.QueryBuilders.matchQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.hamcrest.Matchers.equalTo; - -/** - * Tests that you can actually cancel an update-by-query request and all the plumbing works. Doesn't test all of the different cancellation - * places - that is the responsibility of {@link AsyncBulkByScrollActionTests} which have more precise control to simulate failures but do - * not exercise important portion of the stack like transport and task management. - */ -public class UpdateByQueryCancelTests extends ReindexTestCase { - public void testCancel() throws Exception { - BulkIndexByScrollResponse response = CancelTestUtils.testCancel(this, updateByQuery(), UpdateByQueryAction.NAME); - - assertThat(response, matcher().updated(1).reasonCancelled(equalTo("by user request"))); - refresh("source"); - assertHitCount(client().prepareSearch("source").setSize(0).setQuery(matchQuery("giraffes", "giraffes")).get(), 1); - } - - @Override - protected int numberOfShards() { - return 1; - } - - @Override - protected Collection> nodePlugins() { - return CancelTestUtils.nodePlugins(); - } -} diff --git a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryAction.java b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryAction.java deleted file mode 100644 index d77da6e6d3b..00000000000 --- a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryAction.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.deletebyquery; - -import org.elasticsearch.action.Action; -import org.elasticsearch.client.ElasticsearchClient; - -public class DeleteByQueryAction extends Action { - - public static final DeleteByQueryAction INSTANCE = new DeleteByQueryAction(); - public static final String NAME = "indices:data/write/delete/by_query"; - - private DeleteByQueryAction() { - super(NAME); - } - - @Override - public DeleteByQueryResponse newResponse() { - return new DeleteByQueryResponse(); - } - - @Override - public DeleteByQueryRequestBuilder newRequestBuilder(ElasticsearchClient client) { - return new DeleteByQueryRequestBuilder(client, this); - } -} diff --git a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequest.java b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequest.java deleted file mode 100644 index 682fec46c3b..00000000000 --- a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequest.java +++ /dev/null @@ -1,244 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.deletebyquery; - -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.IndicesRequest; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.Scroll; - -import java.io.IOException; -import java.util.Arrays; - -import static org.elasticsearch.action.ValidateActions.addValidationError; -import static org.elasticsearch.search.Scroll.readScroll; - -/** - * Creates a new {@link DeleteByQueryRequest}. Delete-by-query is since elasticsearch 2.0.0 moved into a plugin - * and is not part of elasticsearch core. 
In contrast to the previous, in-core implementation, delete-by-query now - * uses scan/scroll and the returned IDs to delete all documents matching the query. This can have performance - * as well as visibility implications. Delete-by-query now has the following semantics:
- * <ul>
- * <li>it's non-atomic, a delete-by-query may fail at any time while some documents matching the query have already been deleted</li>
- * <li>it's try-once, a delete-by-query may fail at any time and will not retry its execution. All retry logic is left to the user</li>
- * <li>it's syntactic sugar, a delete-by-query is equivalent to a scan/scroll search and corresponding bulk-deletes by ID (see the sketch after this list)</li>
- * <li>it's executed on a point-in-time snapshot, a delete-by-query will only delete the documents that are visible at the point in time the delete-by-query was started, equivalent to the scan/scroll API</li>
- * <li>it's consistent, a delete-by-query will yield consistent results across all replicas of a shard</li>
- * <li>it's forward-compatible, a delete-by-query will only send IDs to the shards as deletes such that no queries are stored in the transaction logs that might not be supported in the future.</li>
- * <li>its results won't be visible until the user refreshes the index.</li>
- * </ul>
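Aside: spelled out, the "syntactic sugar" item above is just this client-side loop — a minimal sketch against the 2.x-era Java client, in which the version checks, routing/parent propagation, and per-index failure accounting performed by TransportDeleteByQueryAction (deleted further down) are omitted:

    import org.elasticsearch.action.bulk.BulkRequestBuilder;
    import org.elasticsearch.action.bulk.BulkResponse;
    import org.elasticsearch.action.search.SearchResponse;
    import org.elasticsearch.client.Client;
    import org.elasticsearch.common.unit.TimeValue;
    import org.elasticsearch.index.query.QueryBuilder;
    import org.elasticsearch.search.SearchHit;
    import org.elasticsearch.search.sort.SortOrder;

    public class DeleteByQuerySketch {
        static void deleteByQuery(Client client, String index, QueryBuilder query) {
            TimeValue keepAlive = TimeValue.timeValueMinutes(1);
            SearchResponse scroll = client.prepareSearch(index)
                    .setQuery(query)
                    .addSort("_doc", SortOrder.ASC) // raw index order: the cheapest way to scroll
                    .setScroll(keepAlive)
                    .setSize(100)
                    .get();
            while (scroll.getHits().getHits().length > 0) {
                BulkRequestBuilder bulk = client.prepareBulk();
                for (SearchHit hit : scroll.getHits().getHits()) {
                    // only IDs travel to the shards; the query itself is never replayed
                    bulk.add(client.prepareDelete(hit.getIndex(), hit.getType(), hit.getId()));
                }
                BulkResponse response = bulk.get();
                // a real implementation must inspect response.hasFailures() here
                scroll = client.prepareSearchScroll(scroll.getScrollId()).setScroll(keepAlive).get();
            }
            client.prepareClearScroll().addScrollId(scroll.getScrollId()).get();
        }
    }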
- *
- * The main reasons why delete-by-query is now extracted as a plugin are:
- * <ul>
- * <li>forward-compatibility, the previous implementation was prone to store unsupported queries in the transaction logs, which is equivalent to data loss</li>
- * <li>consistency & correctness, the previous implementation was prone to produce different results on a shard's replicas, which can essentially result in a corrupted index</li>
- * <li>resiliency, the previous implementation could cause OOM errors, merge-storms and dramatic slowdowns if used incorrectly</li>
- * </ul>
- * - * While delete-by-query is a very useful feature, its implementation is very tricky in a system that is based on per-document modifications. The move towards - * a plugin based solution was mainly done to minimize the risk of cluster failures or corrupted indices which were easily possible with the previous implementation. - * Users that rely on delete-by-query should install the plugin in order to use this functionality. - */ -public class DeleteByQueryRequest extends ActionRequest implements IndicesRequest.Replaceable { - - private String[] indices = Strings.EMPTY_ARRAY; - private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, false); - - private String[] types = Strings.EMPTY_ARRAY; - - private QueryBuilder query; - - private String routing; - - private int size = 0; - - private Scroll scroll = new Scroll(TimeValue.timeValueMinutes(10)); - - private TimeValue timeout; - - public DeleteByQueryRequest() { - } - - /** - * Constructs a new delete by query request to run against the provided indices. No indices means - * it will run against all indices. - */ - public DeleteByQueryRequest(String... indices) { - this.indices = indices; - } - - @Override - public ActionRequestValidationException validate() { - ActionRequestValidationException validationException = null; - if (query == null) { - validationException = addValidationError("source is missing", validationException); - } - return validationException; - } - - @Override - public String[] indices() { - return this.indices; - } - - @Override - public DeleteByQueryRequest indices(String... indices) { - this.indices = indices; - return this; - } - - @Override - public IndicesOptions indicesOptions() { - return indicesOptions; - } - - public DeleteByQueryRequest indicesOptions(IndicesOptions indicesOptions) { - if (indicesOptions == null) { - throw new IllegalArgumentException("IndicesOptions must not be null"); - } - this.indicesOptions = indicesOptions; - return this; - } - - public String[] types() { - return this.types; - } - - public DeleteByQueryRequest types(String... types) { - this.types = types; - return this; - } - - public QueryBuilder query() { - return query; - } - - public DeleteByQueryRequest query(QueryBuilder queryBuilder) { - this.query = queryBuilder; - return this; - } - - public String routing() { - return this.routing; - } - - public DeleteByQueryRequest routing(String routing) { - this.routing = routing; - return this; - } - - public DeleteByQueryRequest routing(String...
routings) { - this.routing = Strings.arrayToCommaDelimitedString(routings); - return this; - } - - public DeleteByQueryRequest size(int size) { - if (size < 0) { - throw new IllegalArgumentException("size must be greater than zero"); - } - this.size = size; - return this; - } - - public int size() { - return size; - } - - - public Scroll scroll() { - return scroll; - } - - public DeleteByQueryRequest scroll(Scroll scroll) { - this.scroll = scroll; - return this; - } - - public DeleteByQueryRequest scroll(TimeValue keepAlive) { - return scroll(new Scroll(keepAlive)); - } - - public DeleteByQueryRequest scroll(String keepAlive) { - return scroll(new Scroll(TimeValue.parseTimeValue(keepAlive, null, getClass().getSimpleName() + ".keepAlive"))); - } - - public TimeValue timeout() { - return timeout; - } - - public DeleteByQueryRequest timeout(TimeValue timeout) { - if (timeout == null) { - throw new IllegalArgumentException("timeout must not be null"); - } - this.timeout = timeout; - return this; - } - - public DeleteByQueryRequest timeout(String timeout) { - timeout(TimeValue.parseTimeValue(timeout, null, getClass().getSimpleName() + ".timeout")); - return this; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - indices = in.readStringArray(); - indicesOptions = IndicesOptions.readIndicesOptions(in); - types = in.readStringArray(); - query = in.readNamedWriteable(QueryBuilder.class); - routing = in.readOptionalString(); - size = in.readVInt(); - if (in.readBoolean()) { - scroll = readScroll(in); - } - if (in.readBoolean()) { - timeout = TimeValue.readTimeValue(in); - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeStringArray(indices); - indicesOptions.writeIndicesOptions(out); - out.writeStringArray(types); - out.writeNamedWriteable(query); - out.writeOptionalString(routing); - out.writeVInt(size); - out.writeOptionalStreamable(scroll); - out.writeOptionalStreamable(timeout); - } - - @Override - public String toString() { - return "delete-by-query indices:" + Arrays.toString(indices) + - ", types:" + Arrays.toString(types) + - ", size:" + size + - ", timeout:" + timeout + - ", routing:" + routing + - ", query:" + query; - } -} diff --git a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequestBuilder.java b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequestBuilder.java deleted file mode 100644 index dc5ba3a15fb..00000000000 --- a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryRequestBuilder.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.action.deletebyquery; - -import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.index.query.QueryBuilder; - -/** - * Creates a new {@link DeleteByQueryRequestBuilder} - * @see DeleteByQueryRequest - */ -public class DeleteByQueryRequestBuilder extends ActionRequestBuilder { - - public DeleteByQueryRequestBuilder(ElasticsearchClient client, DeleteByQueryAction action) { - super(client, action, new DeleteByQueryRequest()); - } - - public DeleteByQueryRequestBuilder setIndices(String... indices) { - request.indices(indices); - return this; - } - - /** - * Specifies what type of requested indices to ignore and wildcard indices expressions. - *

- * For example indices that don't exist. - */ - public DeleteByQueryRequestBuilder setIndicesOptions(IndicesOptions options) { - request.indicesOptions(options); - return this; - } - - /** - * The query used to delete documents. - * - * @see org.elasticsearch.index.query.QueryBuilders - */ - public DeleteByQueryRequestBuilder setQuery(QueryBuilder queryBuilder) { - request.query(queryBuilder); - return this; - } - - /** - * A comma separated list of routing values to control the shards the action will be executed on. - */ - public DeleteByQueryRequestBuilder setRouting(String routing) { - request.routing(routing); - return this; - } - - /** - * The routing values to control the shards that the action will be executed on. - */ - public DeleteByQueryRequestBuilder setRouting(String... routing) { - request.routing(routing); - return this; - } - - /** - * An optional timeout to control how long the delete by query is allowed to take. - */ - public DeleteByQueryRequestBuilder setTimeout(TimeValue timeout) { - request.timeout(timeout); - return this; - } - - /** - * An optional timeout to control how long the delete by query is allowed to take. - */ - public DeleteByQueryRequestBuilder setTimeout(String timeout) { - request.timeout(timeout); - return this; - } - - /** - * The types of documents the query will run against. Defaults to all types. - */ - public DeleteByQueryRequestBuilder setTypes(String... types) { - request.types(types); - return this; - } - -} diff --git a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryResponse.java b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryResponse.java deleted file mode 100644 index 80fae396a25..00000000000 --- a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/DeleteByQueryResponse.java +++ /dev/null @@ -1,201 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.action.deletebyquery; - -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; - -import java.io.IOException; - -import static org.elasticsearch.action.search.ShardSearchFailure.readShardSearchFailure; - -/** - * Delete by query response - * @see DeleteByQueryRequest - */ -public class DeleteByQueryResponse extends ActionResponse implements ToXContent { - - private long tookInMillis; - private boolean timedOut = false; - - private long found; - private long deleted; - private long missing; - private long failed; - - private IndexDeleteByQueryResponse[] indices = IndexDeleteByQueryResponse.EMPTY_ARRAY; - private ShardOperationFailedException[] shardFailures = ShardSearchFailure.EMPTY_ARRAY; - - DeleteByQueryResponse() { - } - - DeleteByQueryResponse(long tookInMillis, boolean timedOut, long found, long deleted, long missing, long failed, IndexDeleteByQueryResponse[] indices, ShardOperationFailedException[] shardFailures) { - this.tookInMillis = tookInMillis; - this.timedOut = timedOut; - this.found = found; - this.deleted = deleted; - this.missing = missing; - this.failed = failed; - this.indices = indices; - this.shardFailures = shardFailures; - } - - /** - * The responses from all the different indices. - */ - public IndexDeleteByQueryResponse[] getIndices() { - return indices; - } - - /** - * The response of a specific index. - */ - public IndexDeleteByQueryResponse getIndex(String index) { - if (index == null) { - return null; - } - for (IndexDeleteByQueryResponse i : indices) { - if (index.equals(i.getIndex())) { - return i; - } - } - return null; - } - - public TimeValue getTook() { - return new TimeValue(tookInMillis); - } - - public long getTookInMillis() { - return tookInMillis; - } - - public boolean isTimedOut() { - return this.timedOut; - } - - public long getTotalFound() { - return found; - } - - public long getTotalDeleted() { - return deleted; - } - - public long getTotalMissing() { - return missing; - } - - public long getTotalFailed() { - return failed; - } - - public ShardOperationFailedException[] getShardFailures() { - return shardFailures; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - tookInMillis = in.readVLong(); - timedOut = in.readBoolean(); - found = in.readVLong(); - deleted = in.readVLong(); - missing = in.readVLong(); - failed = in.readVLong(); - - int size = in.readVInt(); - indices = new IndexDeleteByQueryResponse[size]; - for (int i = 0; i < size; i++) { - IndexDeleteByQueryResponse index = new IndexDeleteByQueryResponse(); - index.readFrom(in); - indices[i] = index; - } - - size = in.readVInt(); - if (size == 0) { - shardFailures = ShardSearchFailure.EMPTY_ARRAY; - } else { - shardFailures = new ShardSearchFailure[size]; - for (int i = 0; i < shardFailures.length; i++) { - shardFailures[i] = readShardSearchFailure(in); - } - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeVLong(tookInMillis); - out.writeBoolean(timedOut); - out.writeVLong(found); - out.writeVLong(deleted); - out.writeVLong(missing); - 
out.writeVLong(failed); - - out.writeVInt(indices.length); - for (IndexDeleteByQueryResponse indexResponse : indices) { - indexResponse.writeTo(out); - } - - out.writeVInt(shardFailures.length); - for (ShardOperationFailedException shardSearchFailure : shardFailures) { - shardSearchFailure.writeTo(out); - } - } - - static final class Fields { - static final String TOOK = "took"; - static final String TIMED_OUT = "timed_out"; - static final String INDICES = "_indices"; - static final String FAILURES = "failures"; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(Fields.TOOK, tookInMillis); - builder.field(Fields.TIMED_OUT, timedOut); - - builder.startObject(Fields.INDICES); - IndexDeleteByQueryResponse all = new IndexDeleteByQueryResponse("_all", found, deleted, missing, failed); - all.toXContent(builder, params); - for (IndexDeleteByQueryResponse indexResponse : indices) { - indexResponse.toXContent(builder, params); - } - builder.endObject(); - - builder.startArray(Fields.FAILURES); - if (shardFailures != null) { - for (ShardOperationFailedException shardFailure : shardFailures) { - builder.startObject(); - shardFailure.toXContent(builder, params); - builder.endObject(); - } - } - builder.endArray(); - return builder; - } -} diff --git a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/IndexDeleteByQueryResponse.java b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/IndexDeleteByQueryResponse.java deleted file mode 100644 index 78fca1ef5f1..00000000000 --- a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/IndexDeleteByQueryResponse.java +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.deletebyquery; - -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; - -import java.io.IOException; - -/** - * Delete by query response executed on a specific index. - */ -public class IndexDeleteByQueryResponse extends ActionResponse implements ToXContent { - - public static final IndexDeleteByQueryResponse[] EMPTY_ARRAY = new IndexDeleteByQueryResponse[0]; - - private String index; - - private long found = 0L; - private long deleted = 0L; - private long missing = 0L; - private long failed = 0L; - - IndexDeleteByQueryResponse() { - } - - IndexDeleteByQueryResponse(String index) { - this.index = index; - } - - /** - * Instantiates an IndexDeleteByQueryResponse with given values for counters. 
Counters should not be negative. - */ - public IndexDeleteByQueryResponse(String index, long found, long deleted, long missing, long failed) { - this(index); - incrementFound(found); - incrementDeleted(deleted); - incrementMissing(missing); - incrementFailed(failed); - } - - public String getIndex() { - return this.index; - } - - public long getFound() { - return found; - } - - public void incrementFound() { - incrementFound(1L); - } - - public void incrementFound(long delta) { - assert (found + delta >= 0) : "counter 'found' cannot be negative"; - this.found = found + delta; - } - - public long getDeleted() { - return deleted; - } - - public void incrementDeleted() { - incrementDeleted(1L); - } - - public void incrementDeleted(long delta) { - assert (deleted + delta >= 0) : "counter 'deleted' cannot be negative"; - this.deleted = deleted + delta; - } - - public long getMissing() { - return missing; - } - - public void incrementMissing() { - incrementMissing(1L); - } - - public void incrementMissing(long delta) { - assert (missing + delta >= 0) : "counter 'missing' cannot be negative"; - this.missing = missing + delta; - } - - public long getFailed() { - return failed; - } - - public void incrementFailed() { - incrementFailed(1L); - } - - public void incrementFailed(long delta) { - assert (failed + delta >= 0) : "counter 'failed' cannot be negative"; - this.failed = failed + delta; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - index = in.readString(); - found = in.readVLong(); - deleted = in.readVLong(); - missing = in.readVLong(); - failed = in.readVLong(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(index); - out.writeVLong(found); - out.writeVLong(deleted); - out.writeVLong(missing); - out.writeVLong(failed); - } - - static final class Fields { - static final String FOUND = "found"; - static final String DELETED = "deleted"; - static final String MISSING = "missing"; - static final String FAILED = "failed"; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(index); - builder.field(Fields.FOUND, found); - builder.field(Fields.DELETED, deleted); - builder.field(Fields.MISSING, missing); - builder.field(Fields.FAILED, failed); - builder.endObject(); - return builder; - } -} diff --git a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryAction.java b/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryAction.java deleted file mode 100644 index f4127c4e532..00000000000 --- a/plugins/delete-by-query/src/main/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryAction.java +++ /dev/null @@ -1,347 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.deletebyquery; - -import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.bulk.BulkItemResponse; -import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.delete.DeleteRequest; -import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.search.ClearScrollRequest; -import org.elasticsearch.action.search.ClearScrollResponse; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchScrollRequest; -import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.action.search.TransportSearchAction; -import org.elasticsearch.action.search.TransportSearchScrollAction; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.CollectionUtils; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.SearchHitField; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; - -/** - * Delete-By-Query implementation that uses efficient scrolling and bulk deletions to delete a large set of documents.
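Aside: the executeScan body below builds its initial scroll with a handful of deliberate flags; here is the same chain in isolation, with the intent of each call spelled out (request and fields as defined in that method):

    SearchSourceBuilder source = new SearchSourceBuilder()
            .query(request.query())
            .fields(fields)      // only _routing and _parent, which the deletes need for routing
            .sort("_doc")        // no scoring, no sorting work: walk the index in raw order
            .fetchSource(false)  // never fetch the documents themselves, just their identities
            .version(true);      // carry versions so the deletes can detect concurrent updates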
- */ -public class TransportDeleteByQueryAction extends HandledTransportAction { - - private final TransportSearchAction searchAction; - private final TransportSearchScrollAction scrollAction; - private final Client client; - - @Inject - public TransportDeleteByQueryAction(Settings settings, ThreadPool threadPool, Client client, - TransportSearchAction transportSearchAction, - TransportSearchScrollAction transportSearchScrollAction, - TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, DeleteByQueryAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, DeleteByQueryRequest::new); - this.searchAction = transportSearchAction; - this.scrollAction = transportSearchScrollAction; - this.client = client; - } - - @Override - protected void doExecute(DeleteByQueryRequest request, ActionListener listener) { - new AsyncDeleteByQueryAction(request, listener).start(); - } - - class AsyncDeleteByQueryAction { - - private final DeleteByQueryRequest request; - private final ActionListener listener; - - private final long startTime; - private final AtomicBoolean timedOut; - private final AtomicLong total; - - private volatile ShardOperationFailedException[] shardFailures; - private final Map results; - - AsyncDeleteByQueryAction(DeleteByQueryRequest request, ActionListener listener) { - this.request = request; - this.listener = listener; - this.startTime = threadPool.estimatedTimeInMillis(); - this.timedOut = new AtomicBoolean(false); - this.total = new AtomicLong(0L); - this.shardFailures = ShardSearchFailure.EMPTY_ARRAY; - this.results = new HashMap<>(); - } - - public void start() { - executeScan(); - } - - void executeScan() { - try { - final SearchRequest scanRequest = new SearchRequest() - .indices(request.indices()) - .types(request.types()) - .indicesOptions(request.indicesOptions()) - .scroll(request.scroll()); - if (request.routing() != null) { - scanRequest.routing(request.routing()); - } - - List fields = new ArrayList<>(); - fields.add("_routing"); - fields.add("_parent"); - SearchSourceBuilder source = new SearchSourceBuilder() - .query(request.query()) - .fields(fields) - .sort("_doc") // important for performance - .fetchSource(false) - .version(true); - if (request.size() > 0) { - source.size(request.size()); - } - if (request.timeout() != null) { - source.timeout(request.timeout()); - } - scanRequest.source(source); - - logger.trace("executing scan request"); - searchAction.execute(scanRequest, new ActionListener() { - @Override - public void onResponse(SearchResponse searchResponse) { - long hits = searchResponse.getHits().getTotalHits(); - logger.trace("first request executed: found [{}] document(s) to delete", hits); - total.set(hits); - deleteHits(null, searchResponse); - } - - @Override - public void onFailure(Throwable e) { - listener.onFailure(e); - } - }); - } catch (Throwable t) { - logger.error("unable to execute the initial scan request of delete by query", t); - listener.onFailure(t); - } - } - - void executeScroll(final String scrollId) { - try { - logger.trace("executing scroll request [{}]", scrollId); - scrollAction.execute(new SearchScrollRequest().scrollId(scrollId).scroll(request.scroll()), new ActionListener() { - @Override - public void onResponse(SearchResponse scrollResponse) { - deleteHits(scrollId, scrollResponse); - } - - @Override - public void onFailure(Throwable e) { - logger.error("scroll request [{}] failed, scrolling document(s) is 
stopped", e, scrollId); - finishHim(scrollId, hasTimedOut(), e); - } - }); - } catch (Throwable t) { - logger.error("unable to execute scroll request [{}]", t, scrollId); - finishHim(scrollId, false, t); - } - } - - void deleteHits(String scrollId, SearchResponse scrollResponse) { - final SearchHit[] docs = scrollResponse.getHits().getHits(); - final String nextScrollId = scrollResponse.getScrollId(); - addShardFailures(scrollResponse.getShardFailures()); - - if (logger.isTraceEnabled()) { - logger.trace("scroll request [{}] executed: [{}] document(s) returned", scrollId, docs.length); - } - - if ((docs.length == 0) || (nextScrollId == null)) { - logger.trace("scrolling documents terminated"); - // if scrollId is null we are on the first request - just pass the nextScrollId which sill be non-null if the query matched no docs - finishHim(scrollId == null ? nextScrollId : scrollId, false, null); - return; - } - - if (hasTimedOut()) { - logger.trace("scrolling documents timed out"); - // if scrollId is null we are on the first request - just pass the nextScrollId which sill be non-null if the query matched no docs - finishHim(scrollId == null ? nextScrollId : scrollId, true, null); - return; - } - - // Delete the scrolled documents using the Bulk API - BulkRequest bulkRequest = new BulkRequest(); - for (SearchHit doc : docs) { - DeleteRequest delete = new DeleteRequest().index(doc.index()).type(doc.type()).id(doc.id()).version(doc.version()); - SearchHitField routing = doc.field("_routing"); - if (routing != null) { - delete.routing((String) routing.value()); - } - SearchHitField parent = doc.field("_parent"); - if (parent != null) { - delete.parent((String) parent.value()); - } - bulkRequest.add(delete); - } - - logger.trace("executing bulk request with [{}] deletions", bulkRequest.numberOfActions()); - client.bulk(bulkRequest, new ActionListener() { - @Override - public void onResponse(BulkResponse bulkResponse) { - onBulkResponse(nextScrollId, bulkResponse); - } - - @Override - public void onFailure(Throwable e) { - onBulkFailure(nextScrollId, docs, e); - } - }); - } - - void onBulkResponse(String scrollId, BulkResponse bulkResponse) { - try { - for (BulkItemResponse item : bulkResponse.getItems()) { - IndexDeleteByQueryResponse indexCounter = results.get(item.getIndex()); - if (indexCounter == null) { - indexCounter = new IndexDeleteByQueryResponse(item.getIndex()); - } - indexCounter.incrementFound(); - if (item.isFailed()) { - indexCounter.incrementFailed(); - } else { - DeleteResponse delete = item.getResponse(); - if (delete.isFound()) { - indexCounter.incrementDeleted(); - } else { - indexCounter.incrementMissing(); - } - } - results.put(item.getIndex(), indexCounter); - } - - logger.trace("scrolling next batch of document(s) with scroll id [{}]", scrollId); - executeScroll(scrollId); - } catch (Throwable t) { - logger.error("unable to process bulk response", t); - finishHim(scrollId, false, t); - } - } - - void onBulkFailure(String scrollId, SearchHit[] docs, Throwable failure) { - try { - logger.trace("execution of scroll request failed: {}", failure.getMessage()); - for (SearchHit doc : docs) { - IndexDeleteByQueryResponse indexCounter = results.get(doc.index()); - if (indexCounter == null) { - indexCounter = new IndexDeleteByQueryResponse(doc.index()); - } - indexCounter.incrementFound(); - indexCounter.incrementFailed(); - results.put(doc.getIndex(), indexCounter); - } - - logger.trace("scrolling document terminated due to scroll request failure [{}]", scrollId); - 
finishHim(scrollId, hasTimedOut(), failure); - } catch (Throwable t) { - logger.error("unable to process bulk failure", t); - finishHim(scrollId, false, t); - } - } - - void finishHim(final String scrollId, boolean scrollTimedOut, Throwable failure) { - try { - if (scrollTimedOut) { - logger.trace("delete-by-query response marked as timed out"); - timedOut.set(true); - } - - if (Strings.hasText(scrollId)) { - ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); - clearScrollRequest.addScrollId(scrollId); - client.clearScroll(clearScrollRequest, new ActionListener() { - @Override - public void onResponse(ClearScrollResponse clearScrollResponse) { - logger.trace("scroll id [{}] cleared", scrollId); - } - - @Override - public void onFailure(Throwable e) { - logger.warn("unable to clear scroll id [{}]: {}", scrollId, e.getMessage()); - } - }); - } - - if (failure != null) { - logger.trace("scrolling document(s) terminated with failures: {}", failure.getMessage()); - listener.onFailure(failure); - } else { - logger.trace("scrolling document(s) terminated with success"); - listener.onResponse(buildResponse()); - } - } catch (Throwable t) { - listener.onFailure(t); - } - } - - boolean hasTimedOut() { - return request.timeout() != null && (threadPool.estimatedTimeInMillis() >= (startTime + request.timeout().millis())); - } - - void addShardFailures(ShardOperationFailedException[] failures) { - if (!CollectionUtils.isEmpty(failures)) { - ShardOperationFailedException[] duplicates = new ShardOperationFailedException[shardFailures.length + failures.length]; - System.arraycopy(shardFailures, 0, duplicates, 0, shardFailures.length); - System.arraycopy(failures, 0, duplicates, shardFailures.length, failures.length); - shardFailures = ExceptionsHelper.groupBy(duplicates); - } - } - - protected DeleteByQueryResponse buildResponse() { - long took = threadPool.estimatedTimeInMillis() - startTime; - long deleted = 0; - long missing = 0; - long failed = 0; - - // Calculates the total number deleted/failed/missing documents - for (IndexDeleteByQueryResponse result : results.values()) { - deleted = deleted + result.getDeleted(); - missing = missing + result.getMissing(); - failed = failed + result.getFailed(); - } - IndexDeleteByQueryResponse[] indices = results.values().toArray(new IndexDeleteByQueryResponse[results.size()]); - return new DeleteByQueryResponse(took, timedOut.get(), total.get(), deleted, missing, failed, indices, shardFailures); - } - } -} diff --git a/plugins/delete-by-query/src/main/java/org/elasticsearch/plugin/deletebyquery/DeleteByQueryPlugin.java b/plugins/delete-by-query/src/main/java/org/elasticsearch/plugin/deletebyquery/DeleteByQueryPlugin.java deleted file mode 100644 index 8395223f669..00000000000 --- a/plugins/delete-by-query/src/main/java/org/elasticsearch/plugin/deletebyquery/DeleteByQueryPlugin.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.plugin.deletebyquery; - -import org.elasticsearch.action.ActionModule; -import org.elasticsearch.action.deletebyquery.DeleteByQueryAction; -import org.elasticsearch.action.deletebyquery.TransportDeleteByQueryAction; -import org.elasticsearch.common.network.NetworkModule; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.rest.action.deletebyquery.RestDeleteByQueryAction; - -public class DeleteByQueryPlugin extends Plugin { - - public static final String NAME = "delete-by-query"; - - @Override - public String name() { - return NAME; - } - - @Override - public String description() { - return "Elasticsearch Delete-By-Query Plugin"; - } - - public void onModule(ActionModule actionModule) { - actionModule.registerAction(DeleteByQueryAction.INSTANCE, TransportDeleteByQueryAction.class); - } - - public void onModule(NetworkModule module) { - module.registerRestHandler(RestDeleteByQueryAction.class); - } - -} diff --git a/plugins/delete-by-query/src/main/java/org/elasticsearch/rest/action/deletebyquery/RestDeleteByQueryAction.java b/plugins/delete-by-query/src/main/java/org/elasticsearch/rest/action/deletebyquery/RestDeleteByQueryAction.java deleted file mode 100644 index 2b537d1cf8a..00000000000 --- a/plugins/delete-by-query/src/main/java/org/elasticsearch/rest/action/deletebyquery/RestDeleteByQueryAction.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.rest.action.deletebyquery; - -import org.elasticsearch.action.deletebyquery.DeleteByQueryRequest; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.client.Client; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.RestChannel; -import org.elasticsearch.rest.RestController; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.support.RestActions; -import org.elasticsearch.rest.action.support.RestToXContentListener; - -import java.io.IOException; - -import static org.elasticsearch.action.deletebyquery.DeleteByQueryAction.INSTANCE; -import static org.elasticsearch.rest.RestRequest.Method.DELETE; - -/** - * @see DeleteByQueryRequest - */ -public class RestDeleteByQueryAction extends BaseRestHandler { - - private IndicesQueriesRegistry indicesQueriesRegistry; - - @Inject - public RestDeleteByQueryAction(Settings settings, RestController controller, Client client, - IndicesQueriesRegistry indicesQueriesRegistry) { - super(settings, client); - this.indicesQueriesRegistry = indicesQueriesRegistry; - controller.registerHandler(DELETE, "/{index}/_query", this); - controller.registerHandler(DELETE, "/{index}/{type}/_query", this); - } - - @Override - public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) throws IOException { - DeleteByQueryRequest delete = new DeleteByQueryRequest(Strings.splitStringByCommaToArray(request.param("index"))); - delete.indicesOptions(IndicesOptions.fromRequest(request, delete.indicesOptions())); - delete.routing(request.param("routing")); - if (request.hasParam("timeout")) { - delete.timeout(request.paramAsTime("timeout", null)); - } - if (RestActions.hasBodyContent(request)) { - delete.query(RestActions.getQueryContent(RestActions.getRestContent(request), indicesQueriesRegistry, parseFieldMatcher)); - } else { - QueryBuilder queryBuilder = RestActions.urlParamsToQueryBuilder(request); - if (queryBuilder != null) { - delete.query(queryBuilder); - } - } - delete.types(Strings.splitStringByCommaToArray(request.param("type"))); - client.execute(INSTANCE, delete, new RestToXContentListener<>(channel)); - } -} diff --git a/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/IndexDeleteByQueryResponseTests.java b/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/IndexDeleteByQueryResponseTests.java deleted file mode 100644 index ea814b44f5d..00000000000 --- a/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/IndexDeleteByQueryResponseTests.java +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.deletebyquery; - -import org.elasticsearch.Version; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.VersionUtils; - -import static org.hamcrest.Matchers.equalTo; - -public class IndexDeleteByQueryResponseTests extends ESTestCase { - public void testIncrements() { - String indexName = randomAsciiOfLength(5); - - // Use randomInt to prevent range overflow - long found = Math.abs(randomInt()); - long deleted = Math.abs(randomInt()); - long missing = Math.abs(randomInt()); - long failed = Math.abs(randomInt()); - - IndexDeleteByQueryResponse response = new IndexDeleteByQueryResponse(indexName, found, deleted, missing, failed); - assertThat(response.getIndex(), equalTo(indexName)); - assertThat(response.getFound(), equalTo(found)); - assertThat(response.getDeleted(), equalTo(deleted)); - assertThat(response.getMissing(), equalTo(missing)); - assertThat(response.getFailed(), equalTo(failed)); - - response.incrementFound(); - response.incrementDeleted(); - response.incrementMissing(); - response.incrementFailed(); - - assertThat(response.getFound(), equalTo(found + 1)); - assertThat(response.getDeleted(), equalTo(deleted + 1)); - assertThat(response.getMissing(), equalTo(missing + 1)); - assertThat(response.getFailed(), equalTo(failed + 1)); - - // Use randomInt to prevent range overflow - long inc = randomIntBetween(0, 1000); - response.incrementFound(inc); - response.incrementDeleted(inc); - response.incrementMissing(inc); - response.incrementFailed(inc); - - assertThat(response.getFound(), equalTo(found + 1 + inc)); - assertThat(response.getDeleted(), equalTo(deleted + 1 + inc)); - assertThat(response.getMissing(), equalTo(missing + 1 + inc)); - assertThat(response.getFailed(), equalTo(failed + 1 + inc)); - } - - public void testNegativeCounters() { - assumeTrue("assertions must be enable for this test to pass", assertionsEnabled()); - try { - new IndexDeleteByQueryResponse("index", -1L, 0L, 0L, 0L); - fail("should have thrown an assertion error concerning the negative counter"); - } catch (AssertionError e) { - assertThat("message contains error about a negative counter: " + e.getMessage(), - e.getMessage().contains("counter 'found' cannot be negative"), equalTo(true)); - } - - try { - new IndexDeleteByQueryResponse("index", 0L, -1L, 0L, 0L); - fail("should have thrown an assertion error concerning the negative counter"); - } catch (AssertionError e) { - assertThat("message contains error about a negative counter: " + e.getMessage(), - e.getMessage().contains("counter 'deleted' cannot be negative"), equalTo(true)); - } - - try { - new IndexDeleteByQueryResponse("index", 0L, 0L, -1L, 0L); - fail("should have thrown an assertion error concerning the negative counter"); - } catch (AssertionError e) { - assertThat("message contains error about a negative counter: " + e.getMessage(), - e.getMessage().contains("counter 'missing' cannot be negative"), equalTo(true)); - } - - try { - new 
IndexDeleteByQueryResponse("index", 0L, 0L, 0L, -1L); - fail("should have thrown an assertion error concerning the negative counter"); - } catch (AssertionError e) { - assertThat("message contains error about a negative counter: " + e.getMessage(), - e.getMessage().contains("counter 'failed' cannot be negative"), equalTo(true)); - } - } - - public void testNegativeIncrements() { - assumeTrue("assertions must be enabled for this test to pass", assertionsEnabled()); - try { - IndexDeleteByQueryResponse response = new IndexDeleteByQueryResponse(); - response.incrementFound(-10L); - fail("should have thrown an assertion error concerning the negative counter"); - } catch (AssertionError e) { - assertThat("message contains error about a negative counter: " + e.getMessage(), - e.getMessage().contains("counter 'found' cannot be negative"), equalTo(true)); - } - - try { - IndexDeleteByQueryResponse response = new IndexDeleteByQueryResponse(); - response.incrementDeleted(-10L); - fail("should have thrown an assertion error concerning the negative counter"); - } catch (AssertionError e) { - assertThat("message contains error about a negative counter: " + e.getMessage(), - e.getMessage().contains("counter 'deleted' cannot be negative"), equalTo(true)); - } - - try { - IndexDeleteByQueryResponse response = new IndexDeleteByQueryResponse(); - response.incrementMissing(-10L); - fail("should have thrown an assertion error concerning the negative counter"); - } catch (AssertionError e) { - assertThat("message contains error about a negative counter: " + e.getMessage(), - e.getMessage().contains("counter 'missing' cannot be negative"), equalTo(true)); - } - - try { - IndexDeleteByQueryResponse response = new IndexDeleteByQueryResponse(); - response.incrementFailed(-1L); - fail("should have thrown an assertion error concerning the negative counter"); - } catch (AssertionError e) { - assertThat("message contains error about a negative counter: " + e.getMessage(), - e.getMessage().contains("counter 'failed' cannot be negative"), equalTo(true)); - } - } - - public void testSerialization() throws Exception { - IndexDeleteByQueryResponse response = new IndexDeleteByQueryResponse(randomAsciiOfLength(5), Math.abs(randomLong()), Math.abs(randomLong()), Math.abs(randomLong()), Math.abs(randomLong())); - Version testVersion = VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumCompatibilityVersion(), Version.CURRENT); - BytesStreamOutput output = new BytesStreamOutput(); - output.setVersion(testVersion); - response.writeTo(output); - - StreamInput streamInput = StreamInput.wrap(output.bytes()); - streamInput.setVersion(testVersion); - IndexDeleteByQueryResponse deserializedResponse = new IndexDeleteByQueryResponse(); - deserializedResponse.readFrom(streamInput); - - assertThat(deserializedResponse.getIndex(), equalTo(response.getIndex())); - assertThat(deserializedResponse.getFound(), equalTo(response.getFound())); - assertThat(deserializedResponse.getDeleted(), equalTo(response.getDeleted())); - assertThat(deserializedResponse.getMissing(), equalTo(response.getMissing())); - assertThat(deserializedResponse.getFailed(), equalTo(response.getFailed())); - } -} diff --git a/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryActionTests.java b/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryActionTests.java deleted file mode 100644 index 980ee76c2ce..00000000000 ---
a/plugins/delete-by-query/src/test/java/org/elasticsearch/action/deletebyquery/TransportDeleteByQueryActionTests.java +++ /dev/null @@ -1,458 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.deletebyquery; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; -import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; -import org.elasticsearch.action.bulk.BulkItemResponse; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.search.ClearScrollResponse; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.CollectionUtils; -import org.elasticsearch.common.util.concurrent.CountDown; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.search.internal.InternalSearchHit; -import org.elasticsearch.test.ESSingleNodeTestCase; - -import static org.elasticsearch.index.query.QueryBuilders.boolQuery; -import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; - -public class TransportDeleteByQueryActionTests extends ESSingleNodeTestCase { - public void testExecuteScanFailsOnMissingIndex() { - DeleteByQueryRequest delete = new DeleteByQueryRequest().indices(new String[]{"none"}); - TestActionListener listener = new TestActionListener(); - - newAsyncAction(delete, listener).executeScan(); - waitForCompletion("scan request should fail on missing index", listener); - - assertFailure(listener, "no such index"); - assertSearchContextsClosed(); - } - - public void testExecuteScan() { - createIndex("test"); - final int numDocs = randomIntBetween(1, 200); - for (int i = 1; i <= numDocs; i++) { - client().prepareIndex("test", "type").setSource("num", i).get(); - } - client().admin().indices().prepareRefresh("test").get(); - assertHitCount(client().prepareSearch("test").setSize(0).get(), numDocs); - - final long limit = randomIntBetween(0, numDocs); - DeleteByQueryRequest delete = new DeleteByQueryRequest().indices(new String[]{"test"}).query(boolQuery().must(rangeQuery("num").lte(limit))); - TestActionListener listener = new TestActionListener(); - - newAsyncAction(delete, listener).executeScan(); - waitForCompletion("scan request should 
return the exact number of documents", listener); - - assertNoFailures(listener); - DeleteByQueryResponse response = listener.getResponse(); - assertNotNull(response); - assertThat(response.getTotalFound(), equalTo(limit)); - assertThat(response.getTotalDeleted(), equalTo(limit)); - assertSearchContextsClosed(); - } - - public void testExecuteScrollFailsOnMissingScrollId() { - DeleteByQueryRequest delete = new DeleteByQueryRequest(); - TestActionListener listener = new TestActionListener(); - - newAsyncAction(delete, listener).executeScroll(null); - waitForCompletion("scroll request should fail on missing scroll id", listener); - - assertFailure(listener, "scrollId is missing"); - assertSearchContextsClosed(); - } - - public void testExecuteScrollFailsOnMalformedScrollId() { - DeleteByQueryRequest delete = new DeleteByQueryRequest(); - TestActionListener listener = new TestActionListener(); - - newAsyncAction(delete, listener).executeScroll("123"); - waitForCompletion("scroll request should fail on malformed scroll id", listener); - - assertFailure(listener, "Failed to decode scrollId"); - assertSearchContextsClosed(); - } - - public void testExecuteScrollFailsOnExpiredScrollId() { - final long numDocs = randomIntBetween(1, 100); - for (int i = 1; i <= numDocs; i++) { - client().prepareIndex("test", "type").setSource("num", i).get(); - } - client().admin().indices().prepareRefresh("test").get(); - assertHitCount(client().prepareSearch("test").setSize(0).get(), numDocs); - - SearchResponse searchResponse = client().prepareSearch("test").setScroll(TimeValue.timeValueSeconds(10)).get(); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(numDocs)); - - String scrollId = searchResponse.getScrollId(); - assertTrue(Strings.hasText(scrollId)); - - ClearScrollResponse clearScrollResponse = client().prepareClearScroll().addScrollId(scrollId).get(); - assertTrue(clearScrollResponse.isSucceeded()); - - DeleteByQueryRequest delete = new DeleteByQueryRequest().indices(new String[]{"test"}); - TestActionListener listener = new TestActionListener(); - - newAsyncAction(delete, listener).executeScroll(searchResponse.getScrollId()); - waitForCompletion("scroll request returns zero documents on expired scroll id", listener); - - assertNotNull(listener.getError()); - assertThrowableContains(listener.getError(), "No search context found"); - assertSearchContextsClosed(); - } - - public void testExecuteScrollTimedOut() throws InterruptedException { - client().prepareIndex("test", "type", "1").setSource("num", "1").get(); - client().prepareIndex("test", "type", "2").setSource("num", "1").get(); - client().admin().indices().prepareRefresh("test").get(); - - SearchResponse searchResponse = client().prepareSearch("test").setSize(1).setScroll(TimeValue.timeValueSeconds(10)).get(); - String scrollId = searchResponse.getScrollId(); - assertTrue(Strings.hasText(scrollId)); - - DeleteByQueryRequest delete = new DeleteByQueryRequest().indices(new String[]{"test"}).timeout(TimeValue.timeValueSeconds(1)); - TestActionListener listener = new TestActionListener(); - - final TransportDeleteByQueryAction.AsyncDeleteByQueryAction async = newAsyncAction(delete, listener); - // Wait until the action has timed out - awaitBusy(() -> async.hasTimedOut()); - - async.executeScroll(searchResponse.getScrollId()); - waitForCompletion("scroll request returns zero documents on timed out request", listener); - - assertNull(listener.getError()); - assertTrue(listener.getResponse().isTimedOut()); -
assertThat(listener.getResponse().getTotalDeleted(), equalTo(0L)); - assertSearchContextsClosed(); - } - - public void testExecuteScrollNoDocuments() { - createIndex("test"); - SearchResponse searchResponse = client().prepareSearch("test").setScroll(TimeValue.timeValueSeconds(10)).get(); - String scrollId = searchResponse.getScrollId(); - assertTrue(Strings.hasText(scrollId)); - - DeleteByQueryRequest delete = new DeleteByQueryRequest().indices(new String[]{"test"}); - TestActionListener listener = new TestActionListener(); - - newAsyncAction(delete, listener).executeScroll(searchResponse.getScrollId()); - waitForCompletion("scroll request returns zero documents", listener); - - assertNull(listener.getError()); - assertFalse(listener.getResponse().isTimedOut()); - assertThat(listener.getResponse().getTotalFound(), equalTo(0L)); - assertThat(listener.getResponse().getTotalDeleted(), equalTo(0L)); - assertSearchContextsClosed(); - } - - public void testExecuteScroll() { - final int numDocs = randomIntBetween(1, 100); - for (int i = 1; i <= numDocs; i++) { - client().prepareIndex("test", "type").setSource("num", i).get(); - } - client().admin().indices().prepareRefresh("test").get(); - assertHitCount(client().prepareSearch("test").setSize(0).get(), numDocs); - - final long limit = randomIntBetween(0, numDocs); - - SearchResponse searchResponse = client().prepareSearch("test") - .setScroll(TimeValue.timeValueSeconds(10)) - .setQuery(boolQuery().must(rangeQuery("num").lte(limit))) - .fields("_routing", "_parent") - .setFetchSource(false) - .setVersion(true) - .get(); - - String scrollId = searchResponse.getScrollId(); - assertTrue(Strings.hasText(scrollId)); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(limit)); - - DeleteByQueryRequest delete = new DeleteByQueryRequest().indices(new String[]{"test"}).size(100).query(boolQuery().must(rangeQuery("num").lte(limit))); - TestActionListener listener = new TestActionListener(); - - newAsyncAction(delete, listener).executeScroll(searchResponse.getScrollId()); - waitForCompletion("scroll request should return all documents", listener); - - assertNull(listener.getError()); - assertFalse(listener.getResponse().isTimedOut()); - // docs that have been returned on the 1st page have been skipped - final long expectedDeleted = Math.max(0, limit - searchResponse.getHits().hits().length); - assertThat(listener.getResponse().getTotalDeleted(), equalTo(expectedDeleted)); - assertSearchContextsClosed(); - } - - public void testOnBulkResponse() { - final int nbItems = randomIntBetween(0, 20); - long deleted = 0; - long missing = 0; - long failed = 0; - - BulkItemResponse[] items = new BulkItemResponse[nbItems]; - for (int i = 0; i < nbItems; i++) { - if (randomBoolean()) { - boolean delete = true; - if (rarely()) { - delete = false; - missing++; - } else { - deleted++; - } - items[i] = new BulkItemResponse(i, "delete", new DeleteResponse(new ShardId("test", "_na_", 0), "type", String.valueOf(i), 1, delete)); - } else { - items[i] = new BulkItemResponse(i, "delete", new BulkItemResponse.Failure("test", "type", String.valueOf(i), new Throwable("item failed"))); - failed++; - } - } - - // We just need a valid scroll id - createIndex("test"); - SearchResponse searchResponse = client().prepareSearch().setScroll(TimeValue.timeValueSeconds(10)).get(); - String scrollId = searchResponse.getScrollId(); - assertTrue(Strings.hasText(scrollId)); - - try { - DeleteByQueryRequest delete = new DeleteByQueryRequest(); - TestActionListener listener = new 
TestActionListener(); - - newAsyncAction(delete, listener).onBulkResponse(scrollId, new BulkResponse(items, 0L)); - waitForCompletion("waiting for bulk response to complete", listener); - - assertNoFailures(listener); - assertThat(listener.getResponse().getTotalDeleted(), equalTo(deleted)); - assertThat(listener.getResponse().getTotalFailed(), equalTo(failed)); - assertThat(listener.getResponse().getTotalMissing(), equalTo(missing)); - } finally { - client().prepareClearScroll().addScrollId(scrollId).get(); - } - } - - public void testOnBulkResponseMultipleIndices() { - final int nbIndices = randomIntBetween(2, 5); - - // Holds counters for the total + all indices - final long[] found = new long[1 + nbIndices]; - final long[] deleted = new long[1 + nbIndices]; - final long[] missing = new long[1 + nbIndices]; - final long[] failed = new long[1 + nbIndices]; - - final int nbItems = randomIntBetween(0, 100); - found[0] = nbItems; - - BulkItemResponse[] items = new BulkItemResponse[nbItems]; - for (int i = 0; i < nbItems; i++) { - int index = randomIntBetween(1, nbIndices); - found[index] = found[index] + 1; - - if (randomBoolean()) { - boolean delete = true; - if (rarely()) { - delete = false; - missing[0] = missing[0] + 1; - missing[index] = missing[index] + 1; - } else { - deleted[0] = deleted[0] + 1; - deleted[index] = deleted[index] + 1; - } - items[i] = new BulkItemResponse(i, "delete", new DeleteResponse(new ShardId("test-" + index, "_na_", 0), "type", String.valueOf(i), 1, delete)); - } else { - items[i] = new BulkItemResponse(i, "delete", new BulkItemResponse.Failure("test-" + index, "type", String.valueOf(i), new Throwable("item failed"))); - failed[0] = failed[0] + 1; - failed[index] = failed[index] + 1; - } - } - - // We just need a valid scroll id - createIndex("test"); - SearchResponse searchResponse = client().prepareSearch().setScroll(TimeValue.timeValueSeconds(10)).get(); - String scrollId = searchResponse.getScrollId(); - assertTrue(Strings.hasText(scrollId)); - - try { - DeleteByQueryRequest delete = new DeleteByQueryRequest(); - TestActionListener listener = new TestActionListener(); - - newAsyncAction(delete, listener).onBulkResponse(scrollId, new BulkResponse(items, 0L)); - waitForCompletion("waiting for bulk response to complete", listener); - - assertNoFailures(listener); - assertThat(listener.getResponse().getTotalDeleted(), equalTo(deleted[0])); - assertThat(listener.getResponse().getTotalFailed(), equalTo(failed[0])); - assertThat(listener.getResponse().getTotalMissing(), equalTo(missing[0])); - - for (int i = 1; i <= nbIndices; i++) { - IndexDeleteByQueryResponse indexResponse = listener.getResponse().getIndex("test-" + i); - if (found[i] >= 1) { - assertNotNull(indexResponse); - assertThat(indexResponse.getFound(), equalTo(found[i])); - assertThat(indexResponse.getDeleted(), equalTo(deleted[i])); - assertThat(indexResponse.getFailed(), equalTo(failed[i])); - assertThat(indexResponse.getMissing(), equalTo(missing[i])); - } else { - assertNull(indexResponse); - } - } - } finally { - client().prepareClearScroll().addScrollId(scrollId).get(); - } - } - - public void testOnBulkFailureNoDocuments() { - DeleteByQueryRequest delete = new DeleteByQueryRequest(); - TestActionListener listener = new TestActionListener(); - - newAsyncAction(delete, listener).onBulkFailure(null, new SearchHit[0], new Throwable("This is a bulk failure")); - waitForCompletion("waiting for bulk failure to complete", listener); - - assertFailure(listener, "This is a bulk failure"); - } - - public 
void testOnBulkFailure() { - final int nbDocs = randomIntBetween(0, 20); - SearchHit[] docs = new SearchHit[nbDocs]; - for (int i = 0; i < nbDocs; i++) { - InternalSearchHit doc = new InternalSearchHit(randomInt(), String.valueOf(i), new Text("type"), null); - doc.shard(new SearchShardTarget("node", new Index("test", "_na_"), randomInt())); - docs[i] = doc; - } - - DeleteByQueryRequest delete = new DeleteByQueryRequest(); - TestActionListener listener = new TestActionListener(); - - TransportDeleteByQueryAction.AsyncDeleteByQueryAction async = newAsyncAction(delete, listener); - async.onBulkFailure(null, docs, new Throwable("This is a bulk failure")); - waitForCompletion("waiting for bulk failure to complete", listener); - assertFailure(listener, "This is a bulk failure"); - - DeleteByQueryResponse response = async.buildResponse(); - assertThat(response.getTotalFailed(), equalTo((long) nbDocs)); - assertThat(response.getTotalDeleted(), equalTo(0L)); - } - - public void testFinishHim() { - TestActionListener listener = new TestActionListener(); - newAsyncAction(new DeleteByQueryRequest(), listener).finishHim(null, false, null); - waitForCompletion("waiting for finishHim to complete with success", listener); - assertNoFailures(listener); - assertNotNull(listener.getResponse()); - assertFalse(listener.getResponse().isTimedOut()); - - listener = new TestActionListener(); - newAsyncAction(new DeleteByQueryRequest(), listener).finishHim(null, true, null); - waitForCompletion("waiting for finishHim to complete with timed out = true", listener); - assertNoFailures(listener); - assertNotNull(listener.getResponse()); - assertTrue(listener.getResponse().isTimedOut()); - - listener = new TestActionListener(); - newAsyncAction(new DeleteByQueryRequest(), listener).finishHim(null, false, new Throwable("Fake error")); - waitForCompletion("waiting for finishHim to complete with error", listener); - assertFailure(listener, "Fake error"); - assertNull(listener.getResponse()); - } - - private TransportDeleteByQueryAction.AsyncDeleteByQueryAction newAsyncAction(DeleteByQueryRequest request, TestActionListener listener) { - TransportDeleteByQueryAction action = getInstanceFromNode(TransportDeleteByQueryAction.class); - assertNotNull(action); - return action.new AsyncDeleteByQueryAction(request, listener); - } - - private void waitForCompletion(String testName, final TestActionListener listener) { - logger.info(" --> waiting for delete-by-query [{}] to complete", testName); - try { - awaitBusy(() -> listener.isTerminated()); - } catch (InterruptedException e) { - logger.error("exception when waiting for delete-by-query [{}] to complete", e, testName); - fail("exception when waiting for delete-by-query [" + testName + "] to complete: " + e.getMessage()); - } - } - - private void assertFailure(TestActionListener listener, String expectedMessage) { - Throwable t = listener.getError(); - assertNotNull(t); - assertTrue(Strings.hasText(expectedMessage)); - assertTrue("error message should contain [" + expectedMessage + "] but got [" + t.getMessage() + "]", t.getMessage().contains(expectedMessage)); - } - - private void assertNoFailures(TestActionListener listener) { - assertNull(listener.getError()); - assertTrue(CollectionUtils.isEmpty(listener.getResponse().getShardFailures())); - } - - private void assertSearchContextsClosed() { - NodesStatsResponse nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true).get(); - for (NodeStats nodeStat : nodesStats.getNodes()){ -
assertThat(nodeStat.getIndices().getSearch().getOpenContexts(), equalTo(0L)); - } - } - - private void assertThrowableContains(Throwable t, String expectedFailure) { - assertThat(t.toString(), containsString(expectedFailure)); - } - - private class TestActionListener implements ActionListener<DeleteByQueryResponse> { - private final CountDown count = new CountDown(1); - - private DeleteByQueryResponse response; - private Throwable error; - - @Override - public void onResponse(DeleteByQueryResponse response) { - try { - this.response = response; - } finally { - count.countDown(); - } - } - - @Override - public void onFailure(Throwable e) { - try { - this.error = e; - } finally { - count.countDown(); - } - } - - public boolean isTerminated() { - return count.isCountedDown(); - } - - public DeleteByQueryResponse getResponse() { - return response; - } - - public Throwable getError() { - return error; - } - } -} diff --git a/plugins/delete-by-query/src/test/java/org/elasticsearch/plugin/deletebyquery/DeleteByQueryTests.java b/plugins/delete-by-query/src/test/java/org/elasticsearch/plugin/deletebyquery/DeleteByQueryTests.java deleted file mode 100644 index 1245c83444c..00000000000 --- a/plugins/delete-by-query/src/test/java/org/elasticsearch/plugin/deletebyquery/DeleteByQueryTests.java +++ /dev/null @@ -1,446 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - -package org.elasticsearch.plugin.deletebyquery; - -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; -import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; -import org.elasticsearch.action.admin.indices.alias.Alias; -import org.elasticsearch.action.deletebyquery.DeleteByQueryAction; -import org.elasticsearch.action.deletebyquery.DeleteByQueryRequestBuilder; -import org.elasticsearch.action.deletebyquery.DeleteByQueryResponse; -import org.elasticsearch.action.deletebyquery.IndexDeleteByQueryResponse; -import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.index.query.MatchQueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; - -import java.util.Collection; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.atomic.AtomicReference; - -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.nullValue; - -@ClusterScope(scope = SUITE, transportClientRatio = 0) -public class DeleteByQueryTests extends ESIntegTestCase { - @Override - protected Collection<Class<? extends Plugin>> nodePlugins() { - return pluginList(DeleteByQueryPlugin.class); - } - - public void testDeleteByQueryWithNoSource() { - try { - newDeleteByQuery().get(); - fail("should have thrown a validation exception because of the missing source"); - } catch (ActionRequestValidationException e) { - assertThat(e.getMessage(), containsString("source is missing")); - } - } - - public void testDeleteByQueryWithNoIndices() throws Exception { - DeleteByQueryRequestBuilder delete = newDeleteByQuery().setQuery(QueryBuilders.matchAllQuery()); - delete.setIndicesOptions(IndicesOptions.fromOptions(false, true, true, false)); - assertDBQResponse(delete.get(), 0L, 0L, 0L, 0L); - assertSearchContextsClosed(); - } - - public void testDeleteByQueryWithOneIndex() throws Exception { - final long docs = randomIntBetween(1, 50); - for (int i = 0; i < docs; i++) { - index("test", "test", String.valueOf(i), "fields1", 1); - } - refresh(); - assertHitCount(client().prepareSearch("test").setSize(0).get(), docs); - - DeleteByQueryRequestBuilder delete = newDeleteByQuery().setIndices("t*").setQuery(QueryBuilders.matchAllQuery()); - assertDBQResponse(delete.get(), docs, docs, 0L, 0L); - refresh(); - assertHitCount(client().prepareSearch("test").setSize(0).get(), 0); - assertSearchContextsClosed(); - } - - public void testDeleteByQueryWithMultipleIndices() throws Exception { - final int indices = randomIntBetween(2, 5); - final int docs = randomIntBetween(2, 10) *
2; - long[] candidates = new long[indices]; - - for (int i = 0; i < indices; i++) { - // number of documents to be deleted with the upcoming delete-by-query - // (this number differs for each index) - candidates[i] = randomIntBetween(1, docs); - - for (int j = 0; j < docs; j++) { - boolean candidate = (j < candidates[i]); - index("test-" + i, "test", String.valueOf(j), "candidate", candidate); - } - } - - // total number of expected deletions - long deletions = 0; - for (long i : candidates) { - deletions = deletions + i; - } - refresh(); - - assertHitCount(client().prepareSearch().setSize(0).get(), docs * indices); - for (int i = 0; i < indices; i++) { - assertHitCount(client().prepareSearch("test-" + i).setSize(0).get(), docs); - } - - // Deletes all the documents with candidate=true - DeleteByQueryResponse response = newDeleteByQuery().setIndices("test-*").setQuery(QueryBuilders.termQuery("candidate", true)).get(); - refresh(); - - // Checks that the DBQ response returns the expected number of deletions - assertDBQResponse(response, deletions, deletions, 0L, 0L); - assertNotNull(response.getIndices()); - assertThat(response.getIndices().length, equalTo(indices)); - - for (int i = 0; i < indices; i++) { - String indexName = "test-" + i; - IndexDeleteByQueryResponse indexResponse = response.getIndex(indexName); - assertThat(indexResponse.getFound(), equalTo(candidates[i])); - assertThat(indexResponse.getDeleted(), equalTo(candidates[i])); - assertThat(indexResponse.getFailed(), equalTo(0L)); - assertThat(indexResponse.getMissing(), equalTo(0L)); - assertThat(indexResponse.getIndex(), equalTo(indexName)); - long remaining = docs - candidates[i]; - assertHitCount(client().prepareSearch(indexName).setSize(0).get(), remaining); - } - - assertHitCount(client().prepareSearch().setSize(0).get(), (indices * docs) - deletions); - assertSearchContextsClosed(); - } - - public void testDeleteByQueryWithMissingIndex() throws Exception { - client().prepareIndex("test", "test") - .setSource(jsonBuilder().startObject().field("field1", 1).endObject()) - .setRefresh(true) - .get(); - assertHitCount(client().prepareSearch().setSize(0).get(), 1); - - DeleteByQueryRequestBuilder delete = newDeleteByQuery().setIndices("test", "missing").setQuery(QueryBuilders.matchAllQuery()); - try { - delete.get(); - fail("should have thrown an exception because of a missing index"); - } catch (IndexNotFoundException e) { - // Ok - } - - delete.setIndicesOptions(IndicesOptions.lenientExpandOpen()); - assertDBQResponse(delete.get(), 1L, 1L, 0L, 0L); - refresh(); - assertHitCount(client().prepareSearch("test").setSize(0).get(), 0); - assertSearchContextsClosed(); - } - - public void testDeleteByQueryWithTypes() throws Exception { - final long docs = randomIntBetween(1, 50); - for (int i = 0; i < docs; i++) { - index(randomFrom("test1", "test2", "test3"), "type1", String.valueOf(i), "foo", "bar"); - index(randomFrom("test1", "test2", "test3"), "type2", String.valueOf(i), "foo", "bar"); - } - refresh(); - assertHitCount(client().prepareSearch().setSize(0).get(), docs * 2); - assertHitCount(client().prepareSearch().setSize(0).setTypes("type1").get(), docs); - assertHitCount(client().prepareSearch().setSize(0).setTypes("type2").get(), docs); - - DeleteByQueryRequestBuilder delete = newDeleteByQuery().setTypes("type1").setQuery(QueryBuilders.matchAllQuery()); - assertDBQResponse(delete.get(), docs, docs, 0L, 0L); - refresh(); - - assertHitCount(client().prepareSearch().setSize(0).get(), docs); - 
assertHitCount(client().prepareSearch().setSize(0).setTypes("type1").get(), 0); - assertHitCount(client().prepareSearch().setSize(0).setTypes("type2").get(), docs); - assertSearchContextsClosed(); - } - - public void testDeleteByQueryWithRouting() throws Exception { - assertAcked(prepareCreate("test").setSettings("number_of_shards", 2)); - ensureGreen("test"); - - final int docs = randomIntBetween(2, 10); - logger.info("--> indexing [{}] documents with routing", docs); - for (int i = 0; i < docs; i++) { - client().prepareIndex("test", "test", String.valueOf(i)).setRouting(String.valueOf(i)).setSource("field1", 1).get(); - } - refresh(); - - logger.info("--> counting documents with no routing, should be equal to [{}]", docs); - assertHitCount(client().prepareSearch().setSize(0).get(), docs); - - String routing = String.valueOf(randomIntBetween(2, docs)); - - logger.info("--> counting documents with routing [{}]", routing); - long expected = client().prepareSearch().setSize(0).setRouting(routing).get().getHits().totalHits(); - - logger.info("--> delete all documents with routing [{}] with a delete-by-query", routing); - DeleteByQueryRequestBuilder delete = newDeleteByQuery().setRouting(routing).setQuery(QueryBuilders.matchAllQuery()); - assertDBQResponse(delete.get(), expected, expected, 0L, 0L); - refresh(); - - assertHitCount(client().prepareSearch().setSize(0).get(), docs - expected); - assertSearchContextsClosed(); - } - - public void testDeleteByFieldQuery() throws Exception { - assertAcked(prepareCreate("test").addAlias(new Alias("alias"))); - - int numDocs = scaledRandomIntBetween(10, 100); - for (int i = 0; i < numDocs; i++) { - client().prepareIndex("test", "test", Integer.toString(i)) - .setRouting(randomAsciiOfLengthBetween(1, 5)) - .setSource("foo", "bar").get(); - } - refresh(); - - int n = between(0, numDocs - 1); - assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.matchQuery("_id", Integer.toString(n))).get(), 1); - assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get(), numDocs); - - DeleteByQueryRequestBuilder delete = newDeleteByQuery().setIndices("alias").setQuery(QueryBuilders.matchQuery("_id", Integer.toString(n))); - assertDBQResponse(delete.get(), 1L, 1L, 0L, 0L); - refresh(); - assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get(), numDocs - 1); - assertSearchContextsClosed(); - } - - public void testDeleteByQueryWithDateMath() throws Exception { - index("test", "type", "1", "d", "2013-01-01"); - ensureGreen(); - refresh(); - assertHitCount(client().prepareSearch("test").setSize(0).get(), 1); - - DeleteByQueryRequestBuilder delete = newDeleteByQuery().setIndices("test").setQuery(QueryBuilders.rangeQuery("d").to("now-1h")); - assertDBQResponse(delete.get(), 1L, 1L, 0L, 0L); - refresh(); - assertHitCount(client().prepareSearch("test").setSize(0).get(), 0); - assertSearchContextsClosed(); - } - - public void testDeleteByTermQuery() throws Exception { - createIndex("test"); - ensureGreen(); - - int numDocs = scaledRandomIntBetween(10, 50); - IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs + 1]; - for (int i = 0; i < numDocs; i++) { - indexRequestBuilders[i] = client().prepareIndex("test", "test", Integer.toString(i)).setSource("field", "value"); - } - indexRequestBuilders[numDocs] = client().prepareIndex("test", "test", Integer.toString(numDocs)).setSource("field", "other_value"); - indexRandom(true, 
indexRequestBuilders); - - SearchResponse searchResponse = client().prepareSearch("test").get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().totalHits(), equalTo((long) numDocs + 1)); - - DeleteByQueryResponse delete = newDeleteByQuery().setIndices("test").setQuery(QueryBuilders.termQuery("field", "value")).get(); - assertDBQResponse(delete, numDocs, numDocs, 0L, 0L); - - refresh(); - searchResponse = client().prepareSearch("test").get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().totalHits(), equalTo(1L)); - assertSearchContextsClosed(); - } - - public void testConcurrentDeleteByQueriesOnDifferentDocs() throws Throwable { - createIndex("test"); - ensureGreen(); - - final Thread[] threads = new Thread[scaledRandomIntBetween(2, 5)]; - final long docs = randomIntBetween(1, 50); - for (int i = 0; i < docs; i++) { - for (int j = 0; j < threads.length; j++) { - index("test", "test", String.valueOf(i * 10 + j), "field", j); - } - } - refresh(); - assertHitCount(client().prepareSearch("test").setSize(0).get(), docs * threads.length); - - final CountDownLatch start = new CountDownLatch(1); - final AtomicReference<Throwable> exceptionHolder = new AtomicReference<>(); - - for (int i = 0; i < threads.length; i++) { - final int threadNum = i; - assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.termQuery("field", threadNum)).get(), docs); - - Runnable r = new Runnable() { - @Override - public void run() { - try { - start.await(); - - DeleteByQueryResponse rsp = newDeleteByQuery().setQuery(QueryBuilders.termQuery("field", threadNum)).get(); - assertDBQResponse(rsp, docs, docs, 0L, 0L); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } catch (Throwable e) { - exceptionHolder.set(e); - Thread.currentThread().interrupt(); - } - } - }; - threads[i] = new Thread(r); - threads[i].start(); - } - - start.countDown(); - for (Thread thread : threads) { - thread.join(); - } - - Throwable assertionError = exceptionHolder.get(); - if (assertionError != null) { - throw assertionError; - } - - refresh(); - for (int i = 0; i < threads.length; i++) { - assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.termQuery("field", i)).get(), 0); - } - assertSearchContextsClosed(); - } - - public void testConcurrentDeleteByQueriesOnSameDocs() throws Throwable { - assertAcked(prepareCreate("test").setSettings(Settings.builder().put("index.refresh_interval", -1))); - ensureGreen(); - - final long docs = randomIntBetween(50, 100); - for (int i = 0; i < docs; i++) { - index("test", "test", String.valueOf(i), "foo", "bar"); - } - refresh(); - assertHitCount(client().prepareSearch("test").setSize(0).get(), docs); - - final Thread[] threads = new Thread[scaledRandomIntBetween(2, 9)]; - - final CountDownLatch start = new CountDownLatch(1); - final AtomicReference<Throwable> exceptionHolder = new AtomicReference<>(); - - final MatchQueryBuilder query = QueryBuilders.matchQuery("foo", "bar"); - final AtomicLong deleted = new AtomicLong(0); - - for (int i = 0; i < threads.length; i++) { - assertHitCount(client().prepareSearch("test").setSize(0).setQuery(query).get(), docs); - - Runnable r = new Runnable() { - @Override - public void run() { - try { - start.await(); - DeleteByQueryResponse rsp = newDeleteByQuery().setQuery(query).get(); - deleted.addAndGet(rsp.getTotalDeleted()); - - assertThat(rsp.getTotalFound(), equalTo(docs)); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } catch
(Throwable e) { - exceptionHolder.set(e); - Thread.currentThread().interrupt(); - } - } - }; - threads[i] = new Thread(r); - threads[i].start(); - } - - start.countDown(); - for (Thread thread : threads) { - thread.join(); - } - refresh(); - - Throwable assertionError = exceptionHolder.get(); - if (assertionError != null) { - throw assertionError; - } - assertHitCount(client().prepareSearch("test").setSize(0).get(), 0L); - assertThat(deleted.get(), equalTo(docs)); - assertSearchContextsClosed(); - } - - public void testDeleteByQueryOnReadOnlyIndex() throws Exception { - createIndex("test"); - ensureGreen(); - - final long docs = randomIntBetween(1, 50); - for (int i = 0; i < docs; i++) { - index("test", "test", String.valueOf(i), "field", 1); - } - refresh(); - assertHitCount(client().prepareSearch("test").setSize(0).get(), docs); - - try { - enableIndexBlock("test", IndexMetaData.SETTING_READ_ONLY); - DeleteByQueryResponse rsp = newDeleteByQuery().setQuery(QueryBuilders.matchAllQuery()).get(); - assertDBQResponse(rsp, docs, 0L, docs, 0L); - } finally { - disableIndexBlock("test", IndexMetaData.SETTING_READ_ONLY); - } - - assertHitCount(client().prepareSearch("test").setSize(0).get(), docs); - assertSearchContextsClosed(); - } - - private DeleteByQueryRequestBuilder newDeleteByQuery() { - return new DeleteByQueryRequestBuilder(client(), DeleteByQueryAction.INSTANCE); - } - - private void assertDBQResponse(DeleteByQueryResponse response, long found, long deleted, long failed, long missing) { - assertNotNull(response); - assertThat(response.isTimedOut(), equalTo(false)); - assertThat(response.getShardFailures().length, equalTo(0)); - assertThat(response.getTotalFound(), equalTo(found)); - assertThat(response.getTotalDeleted(), equalTo(deleted)); - assertThat(response.getTotalFailed(), equalTo(failed)); - assertThat(response.getTotalMissing(), equalTo(missing)); - } - - private void assertSearchContextsClosed() throws Exception { - // The scroll id (and thus the underlying search context) is cleared in - // an async manner in TransportDeleteByQueryAction, so we need to use - // assertBusy() here to wait for the search context to be released.
- assertBusy(new Runnable() { - @Override - public void run() { - NodesStatsResponse nodesStats = client().admin().cluster().prepareNodesStats().setIndices(true).get(); - for (NodeStats nodeStat : nodesStats.getNodes()){ - assertThat(nodeStat.getIndices().getSearch().getOpenContexts(), equalTo(0L)); - } - } - }); - } -} diff --git a/plugins/delete-by-query/src/test/resources/rest-api-spec/api/delete_by_query.json b/plugins/delete-by-query/src/test/resources/rest-api-spec/api/delete_by_query.json deleted file mode 100644 index d8d4446a1b2..00000000000 --- a/plugins/delete-by-query/src/test/resources/rest-api-spec/api/delete_by_query.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "delete_by_query": { - "documentation": "https://www.elastic.co/guide/en/elasticsearch/plugins/master/plugins-delete-by-query.html", - "methods": ["DELETE"], - "url": { - "path": "/{index}/_query", - "paths": ["/{index}/_query", "/{index}/{type}/_query"], - "parts": { - "index": { - "type" : "list", - "required": true, - "description" : "A comma-separated list of indices to restrict the operation; use `_all` to perform the operation on all indices" - }, - "type": { - "type" : "list", - "description" : "A comma-separated list of types to restrict the operation" - } - }, - "params": { - "analyzer": { - "type" : "string", - "description" : "The analyzer to use for the query string" - }, - "default_operator": { - "type" : "enum", - "options" : ["AND","OR"], - "default" : "OR", - "description" : "The default operator for query string query (AND or OR)" - }, - "df": { - "type" : "string", - "description" : "The field to use as default where no field prefix is given in the query string" - }, - "ignore_unavailable": { - "type" : "boolean", - "description" : "Whether specified concrete indices should be ignored when unavailable (missing or closed)" - }, - "allow_no_indices": { - "type" : "boolean", - "description" : "Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)" - }, - "expand_wildcards": { - "type" : "enum", - "options" : ["open","closed","none","all"], - "default" : "open", - "description" : "Whether to expand wildcard expression to concrete indices that are open, closed or both." 
- }, - "q": { - "type" : "string", - "description" : "Query in the Lucene query string syntax" - }, - "routing": { - "type" : "string", - "description" : "Specific routing value" - }, - "timeout": { - "type" : "time", - "description" : "Explicit operation timeout" - } - } - }, - "body": { - "description" : "A query to restrict the operation specified with the Query DSL" - } - } -} diff --git a/plugins/delete-by-query/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml b/plugins/delete-by-query/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml deleted file mode 100644 index 063e959a807..00000000000 --- a/plugins/delete-by-query/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml +++ /dev/null @@ -1,54 +0,0 @@ -setup: - - do: - index: - index: test_1 - type: test - id: 1 - body: { foo: bar } - - - do: - index: - index: test_1 - type: test - id: 2 - body: { foo: baz } - - - do: - index: - index: test_1 - type: test - id: 3 - body: { foo: foo } - - - do: - indices.refresh: {} - ---- -"Basic delete_by_query": - - do: - delete_by_query: - index: test_1 - body: - query: - match: - foo: bar - - - do: - indices.refresh: {} - - - do: - count: - index: test_1 - - - match: { count: 2 } - ---- -"Delete_by_query body without query element": - - do: - catch: request - delete_by_query: - index: test_1 - body: - match: - foo: bar - diff --git a/plugins/discovery-azure/build.gradle b/plugins/discovery-azure/build.gradle index ceddbc2ff44..8f0c641e150 100644 --- a/plugins/discovery-azure/build.gradle +++ b/plugins/discovery-azure/build.gradle @@ -20,7 +20,7 @@ import org.elasticsearch.gradle.LoggedExec */ esplugin { - description 'The Azure Discovery plugin allows to use Azure API for the unicast discovery mechanism.' + description 'The Azure Discovery plugin allows to use Azure API for the unicast discovery mechanism' classname 'org.elasticsearch.plugin.discovery.azure.AzureDiscoveryPlugin' } @@ -49,6 +49,10 @@ dependencies { compile 'org.codehaus.jackson:jackson-mapper-asl:1.9.2' compile 'org.codehaus.jackson:jackson-jaxrs:1.9.2' compile 'org.codehaus.jackson:jackson-xc:1.9.2' + + // HACK: javax.xml.bind was removed from default modules in java 9, so we pull the api in here, + // and whitelist this hack in JarHell + compile 'javax.xml.bind:jaxb-api:2.2.2' } // needed to be consistent with ssl host checking @@ -121,4 +125,107 @@ thirdPartyAudit.excludes = [ 'org.osgi.framework.SynchronousBundleListener', 'com.sun.xml.fastinfoset.stax.StAXDocumentParser', 'com.sun.xml.fastinfoset.stax.StAXDocumentSerializer', + + // jarhell with jdk (intentionally, because jaxb was removed from default modules in java 9) + 'javax.xml.bind.Binder', + 'javax.xml.bind.ContextFinder$1', + 'javax.xml.bind.ContextFinder', + 'javax.xml.bind.DataBindingException', + 'javax.xml.bind.DatatypeConverter', + 'javax.xml.bind.DatatypeConverterImpl$CalendarFormatter', + 'javax.xml.bind.DatatypeConverterImpl', + 'javax.xml.bind.DatatypeConverterInterface', + 'javax.xml.bind.Element', + 'javax.xml.bind.GetPropertyAction', + 'javax.xml.bind.JAXB$Cache', + 'javax.xml.bind.JAXB', + 'javax.xml.bind.JAXBContext', + 'javax.xml.bind.JAXBElement$GlobalScope', + 'javax.xml.bind.JAXBElement', + 'javax.xml.bind.JAXBException', + 'javax.xml.bind.JAXBIntrospector', + 'javax.xml.bind.JAXBPermission', + 'javax.xml.bind.MarshalException', + 'javax.xml.bind.Marshaller$Listener', + 'javax.xml.bind.Marshaller', + 'javax.xml.bind.Messages', + 'javax.xml.bind.NotIdentifiableEvent', + 
'javax.xml.bind.ParseConversionEvent', + 'javax.xml.bind.PrintConversionEvent', + 'javax.xml.bind.PropertyException', + 'javax.xml.bind.SchemaOutputResolver', + 'javax.xml.bind.TypeConstraintException', + 'javax.xml.bind.UnmarshalException', + 'javax.xml.bind.Unmarshaller$Listener', + 'javax.xml.bind.Unmarshaller', + 'javax.xml.bind.UnmarshallerHandler', + 'javax.xml.bind.ValidationEvent', + 'javax.xml.bind.ValidationEventHandler', + 'javax.xml.bind.ValidationEventLocator', + 'javax.xml.bind.ValidationException', + 'javax.xml.bind.Validator', + 'javax.xml.bind.WhiteSpaceProcessor', + 'javax.xml.bind.annotation.DomHandler', + 'javax.xml.bind.annotation.W3CDomHandler', + 'javax.xml.bind.annotation.XmlAccessOrder', + 'javax.xml.bind.annotation.XmlAccessType', + 'javax.xml.bind.annotation.XmlAccessorOrder', + 'javax.xml.bind.annotation.XmlAccessorType', + 'javax.xml.bind.annotation.XmlAnyAttribute', + 'javax.xml.bind.annotation.XmlAnyElement', + 'javax.xml.bind.annotation.XmlAttachmentRef', + 'javax.xml.bind.annotation.XmlAttribute', + 'javax.xml.bind.annotation.XmlElement$DEFAULT', + 'javax.xml.bind.annotation.XmlElement', + 'javax.xml.bind.annotation.XmlElementDecl$GLOBAL', + 'javax.xml.bind.annotation.XmlElementDecl', + 'javax.xml.bind.annotation.XmlElementRef$DEFAULT', + 'javax.xml.bind.annotation.XmlElementRef', + 'javax.xml.bind.annotation.XmlElementRefs', + 'javax.xml.bind.annotation.XmlElementWrapper', + 'javax.xml.bind.annotation.XmlElements', + 'javax.xml.bind.annotation.XmlEnum', + 'javax.xml.bind.annotation.XmlEnumValue', + 'javax.xml.bind.annotation.XmlID', + 'javax.xml.bind.annotation.XmlIDREF', + 'javax.xml.bind.annotation.XmlInlineBinaryData', + 'javax.xml.bind.annotation.XmlList', + 'javax.xml.bind.annotation.XmlMimeType', + 'javax.xml.bind.annotation.XmlMixed', + 'javax.xml.bind.annotation.XmlNs', + 'javax.xml.bind.annotation.XmlNsForm', + 'javax.xml.bind.annotation.XmlRegistry', + 'javax.xml.bind.annotation.XmlRootElement', + 'javax.xml.bind.annotation.XmlSchema', + 'javax.xml.bind.annotation.XmlSchemaType$DEFAULT', + 'javax.xml.bind.annotation.XmlSchemaType', + 'javax.xml.bind.annotation.XmlSchemaTypes', + 'javax.xml.bind.annotation.XmlSeeAlso', + 'javax.xml.bind.annotation.XmlTransient', + 'javax.xml.bind.annotation.XmlType$DEFAULT', + 'javax.xml.bind.annotation.XmlType', + 'javax.xml.bind.annotation.XmlValue', + 'javax.xml.bind.annotation.adapters.CollapsedStringAdapter', + 'javax.xml.bind.annotation.adapters.HexBinaryAdapter', + 'javax.xml.bind.annotation.adapters.NormalizedStringAdapter', + 'javax.xml.bind.annotation.adapters.XmlAdapter', + 'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter$DEFAULT', + 'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter', + 'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapters', + 'javax.xml.bind.attachment.AttachmentMarshaller', + 'javax.xml.bind.attachment.AttachmentUnmarshaller', + 'javax.xml.bind.helpers.AbstractMarshallerImpl', + 'javax.xml.bind.helpers.AbstractUnmarshallerImpl', + 'javax.xml.bind.helpers.DefaultValidationEventHandler', + 'javax.xml.bind.helpers.Messages', + 'javax.xml.bind.helpers.NotIdentifiableEventImpl', + 'javax.xml.bind.helpers.ParseConversionEventImpl', + 'javax.xml.bind.helpers.PrintConversionEventImpl', + 'javax.xml.bind.helpers.ValidationEventImpl', + 'javax.xml.bind.helpers.ValidationEventLocatorImpl', + 'javax.xml.bind.util.JAXBResult', + 'javax.xml.bind.util.JAXBSource$1', + 'javax.xml.bind.util.JAXBSource', + 'javax.xml.bind.util.Messages', + 'javax.xml.bind.util.ValidationEventCollector' ] 
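
The long javax.xml.bind whitelist above covers the entire JAXB API: the bundled jaxb-api jar intentionally duplicates classes that the JDK itself still provides on Java 8, so the third-party audit has to be told the clash is deliberate. What keeping these classes resolvable buys is ordinary JAXB binding of the kind the Azure SDK's Jackson JAX-RS/XC modules rely on. A minimal sketch of such usage, with a made-up HostedService payload type (the plugin's real XML-bound types live in the Azure SDK, not shown here):

--------------------------------------------------
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;

import java.io.StringWriter;

// Hypothetical payload type, for illustration only.
@XmlRootElement(name = "HostedService")
public class HostedService {

    @XmlElement(name = "ServiceName")
    public String serviceName;

    public static void main(String[] args) throws JAXBException {
        HostedService service = new HostedService();
        service.serviceName = "my-azure-service";

        // Resolving JAXBContext is exactly what breaks on Java 9 when
        // javax.xml.bind is absent from the default module graph and no
        // jaxb-api jar is on the classpath.
        StringWriter xml = new StringWriter();
        JAXBContext.newInstance(HostedService.class).createMarshaller().marshal(service, xml);
        System.out.println(xml);
    }
}
--------------------------------------------------

On Java 8 the same classes come from the JDK, which is why the duplicate jar has to be whitelisted in JarHell rather than removed.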
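
Returning to the delete-by-query plugin removal earlier in this diff: the deleted REST spec exposed DELETE /{index}/_query and /{index}/{type}/_query, and the deleted DeleteByQueryTests drove the same action through the Java client. For reference, the call pattern those tests exercised looked roughly like the sketch below (index, field, and value names are placeholders):

--------------------------------------------------
import org.elasticsearch.action.deletebyquery.DeleteByQueryAction;
import org.elasticsearch.action.deletebyquery.DeleteByQueryRequestBuilder;
import org.elasticsearch.action.deletebyquery.DeleteByQueryResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.QueryBuilders;

public class DeleteByQueryUsage {

    // Deletes every document in "test" whose "foo" field matches "bar",
    // mirroring the builder calls in the deleted DeleteByQueryTests, and
    // returns how many documents were actually deleted.
    static long deleteMatching(Client client) {
        DeleteByQueryResponse response =
                new DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE)
                        .setIndices("test")
                        .setQuery(QueryBuilders.matchQuery("foo", "bar"))
                        .get();
        return response.getTotalDeleted();
    }
}
--------------------------------------------------

The scan/scroll/bulk pipeline exercised by the deleted transport tests (executeScan, executeScroll, onBulkResponse) is the machinery behind this single call.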
diff --git a/plugins/discovery-azure/licenses/httpclient-4.3.6.jar.sha1 b/plugins/discovery-azure/licenses/httpclient-4.3.6.jar.sha1 deleted file mode 100644 index 3d35ee99d07..00000000000 --- a/plugins/discovery-azure/licenses/httpclient-4.3.6.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4c47155e3e6c9a41a28db36680b828ced53b8af4 diff --git a/plugins/discovery-azure/licenses/httpclient-4.5.2.jar.sha1 b/plugins/discovery-azure/licenses/httpclient-4.5.2.jar.sha1 new file mode 100644 index 00000000000..6937112a09f --- /dev/null +++ b/plugins/discovery-azure/licenses/httpclient-4.5.2.jar.sha1 @@ -0,0 +1 @@ +733db77aa8d9b2d68015189df76ab06304406e50 \ No newline at end of file diff --git a/plugins/discovery-azure/licenses/httpcore-4.3.3.jar.sha1 b/plugins/discovery-azure/licenses/httpcore-4.3.3.jar.sha1 deleted file mode 100644 index 5d9c0e26c09..00000000000 --- a/plugins/discovery-azure/licenses/httpcore-4.3.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f91b7a4aadc5cf486df6e4634748d7dd7a73f06d diff --git a/plugins/discovery-azure/licenses/httpcore-4.4.4.jar.sha1 b/plugins/discovery-azure/licenses/httpcore-4.4.4.jar.sha1 new file mode 100644 index 00000000000..ef0c257e012 --- /dev/null +++ b/plugins/discovery-azure/licenses/httpcore-4.4.4.jar.sha1 @@ -0,0 +1 @@ +b31526a230871fbe285fbcbe2813f9c0839ae9b0 \ No newline at end of file diff --git a/plugins/discovery-azure/licenses/jaxb-api-2.2.2.jar.sha1 b/plugins/discovery-azure/licenses/jaxb-api-2.2.2.jar.sha1 new file mode 100644 index 00000000000..a37e1872389 --- /dev/null +++ b/plugins/discovery-azure/licenses/jaxb-api-2.2.2.jar.sha1 @@ -0,0 +1 @@ +aeb3021ca93dde265796d82015beecdcff95bf09 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/httpclient-4.3.6.jar.sha1 b/plugins/discovery-ec2/licenses/httpclient-4.3.6.jar.sha1 deleted file mode 100644 index 3d35ee99d07..00000000000 --- a/plugins/discovery-ec2/licenses/httpclient-4.3.6.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4c47155e3e6c9a41a28db36680b828ced53b8af4 diff --git a/plugins/discovery-ec2/licenses/httpclient-4.5.2.jar.sha1 b/plugins/discovery-ec2/licenses/httpclient-4.5.2.jar.sha1 new file mode 100644 index 00000000000..6937112a09f --- /dev/null +++ b/plugins/discovery-ec2/licenses/httpclient-4.5.2.jar.sha1 @@ -0,0 +1 @@ +733db77aa8d9b2d68015189df76ab06304406e50 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/httpcore-4.3.3.jar.sha1 b/plugins/discovery-ec2/licenses/httpcore-4.3.3.jar.sha1 deleted file mode 100644 index 5d9c0e26c09..00000000000 --- a/plugins/discovery-ec2/licenses/httpcore-4.3.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f91b7a4aadc5cf486df6e4634748d7dd7a73f06d diff --git a/plugins/discovery-ec2/licenses/httpcore-4.4.4.jar.sha1 b/plugins/discovery-ec2/licenses/httpcore-4.4.4.jar.sha1 new file mode 100644 index 00000000000..ef0c257e012 --- /dev/null +++ b/plugins/discovery-ec2/licenses/httpcore-4.4.4.jar.sha1 @@ -0,0 +1 @@ +b31526a230871fbe285fbcbe2813f9c0839ae9b0 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/httpclient-4.3.6.jar.sha1 b/plugins/discovery-gce/licenses/httpclient-4.3.6.jar.sha1 deleted file mode 100644 index 3d35ee99d07..00000000000 --- a/plugins/discovery-gce/licenses/httpclient-4.3.6.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4c47155e3e6c9a41a28db36680b828ced53b8af4 diff --git a/plugins/discovery-gce/licenses/httpclient-4.5.2.jar.sha1 b/plugins/discovery-gce/licenses/httpclient-4.5.2.jar.sha1 new file mode 100644 index 00000000000..6937112a09f --- /dev/null +++ 
b/plugins/discovery-gce/licenses/httpclient-4.5.2.jar.sha1 @@ -0,0 +1 @@ +733db77aa8d9b2d68015189df76ab06304406e50 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/httpcore-4.3.3.jar.sha1 b/plugins/discovery-gce/licenses/httpcore-4.3.3.jar.sha1 deleted file mode 100644 index 5d9c0e26c09..00000000000 --- a/plugins/discovery-gce/licenses/httpcore-4.3.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f91b7a4aadc5cf486df6e4634748d7dd7a73f06d diff --git a/plugins/discovery-gce/licenses/httpcore-4.4.4.jar.sha1 b/plugins/discovery-gce/licenses/httpcore-4.4.4.jar.sha1 new file mode 100644 index 00000000000..ef0c257e012 --- /dev/null +++ b/plugins/discovery-gce/licenses/httpcore-4.4.4.jar.sha1 @@ -0,0 +1 @@ +b31526a230871fbe285fbcbe2813f9c0839ae9b0 \ No newline at end of file diff --git a/plugins/ingest-attachment/build.gradle b/plugins/ingest-attachment/build.gradle index e7a48cd1763..3d9545d7aed 100644 --- a/plugins/ingest-attachment/build.gradle +++ b/plugins/ingest-attachment/build.gradle @@ -23,10 +23,10 @@ esplugin { } versions << [ - 'tika': '1.11', - 'pdfbox': '1.8.10', - 'bouncycastle': '1.52', - 'poi': '3.13' + 'tika': '1.13', + 'pdfbox': '2.0.1', + 'bouncycastle': '1.54', + 'poi': '3.15-beta1' ] dependencies { @@ -44,7 +44,7 @@ dependencies { // Adobe PDF compile "org.apache.pdfbox:pdfbox:${versions.pdfbox}" compile "org.apache.pdfbox:fontbox:${versions.pdfbox}" - compile "org.apache.pdfbox:jempbox:${versions.pdfbox}" + compile "org.apache.pdfbox:jempbox:1.8.12" compile "commons-logging:commons-logging:${versions.commonslogging}" compile "org.bouncycastle:bcmail-jdk15on:${versions.bouncycastle}" compile "org.bouncycastle:bcprov-jdk15on:${versions.bouncycastle}" @@ -61,6 +61,9 @@ dependencies { compile 'org.apache.commons:commons-compress:1.10' } +// TODO: stop using LanguageIdentifier... 
+compileJava.options.compilerArgs << "-Xlint:-deprecation" + forbiddenPatterns { exclude '**/*.docx' exclude '**/*.pdf' @@ -99,9 +102,10 @@ thirdPartyAudit.excludes = [ 'com.drew.metadata.iptc.IptcDirectory', 'com.drew.metadata.jpeg.JpegCommentDirectory', 'com.drew.metadata.jpeg.JpegDirectory', - 'com.drew.metadata.xmp.XmpReader', 'com.github.junrar.Archive', 'com.github.junrar.rarfile.FileHeader', + 'com.google.common.reflect.TypeToken', + 'com.google.gson.Gson', 'com.googlecode.mp4parser.DataSource', 'com.googlecode.mp4parser.boxes.apple.AppleAlbumBox', 'com.googlecode.mp4parser.boxes.apple.AppleArtist2Box', @@ -117,6 +121,12 @@ thirdPartyAudit.excludes = [ 'com.googlecode.mp4parser.boxes.apple.AppleTrackNumberBox', 'com.googlecode.mp4parser.boxes.apple.Utf8AppleDataBox', 'com.googlecode.mp4parser.util.CastUtils', + 'com.graphbuilder.curve.ControlPath', + 'com.graphbuilder.curve.GroupIterator', + 'com.graphbuilder.curve.NURBSpline', + 'com.graphbuilder.curve.ShapeMultiPath', + 'com.graphbuilder.curve.ValueVector', + 'com.graphbuilder.geom.PointFactory', 'com.healthmarketscience.jackcess.Column', 'com.healthmarketscience.jackcess.CryptCodecProvider', 'com.healthmarketscience.jackcess.DataType', @@ -136,12 +146,216 @@ thirdPartyAudit.excludes = [ 'com.healthmarketscience.jackcess.util.OleBlob$SimplePackageContent', 'com.healthmarketscience.jackcess.util.OleBlob', 'com.healthmarketscience.jackcess.util.TableIterableBuilder', - 'com.ibm.icu.text.Bidi', - 'com.ibm.icu.text.Normalizer', 'com.jmatio.io.MatFileHeader', 'com.jmatio.io.MatFileReader', 'com.jmatio.types.MLArray', 'com.jmatio.types.MLStructure', + 'com.microsoft.schemas.office.excel.STCF', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1Accel2List', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1AccelList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1AnchorList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1AutoFillList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1AutoLineList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1AutoPictList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1AutoScaleList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1CFList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1CameraList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1CancelList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1CheckedList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ColHiddenList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ColoredList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ColumnList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DDEList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DefaultList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DefaultSizeList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DisabledList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DismissList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DropLinesList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DropStyleList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DxList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1FirstButtonList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1FmlaGroupList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1FmlaLinkList', + 
'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1FmlaMacroList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1FmlaPictList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1FmlaRangeList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1FmlaTxbxList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1HelpList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1HorizList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1IncList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1JustLastXList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1LCTList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ListItemList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1LockTextList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1LockedList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1MapOCXList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1MaxList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1MinList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1MoveWithCellsList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1MultiLineList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1MultiSelList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1NoThreeD2List', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1NoThreeDList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1PageList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1PrintObjectList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1RecalcAlwaysList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1RowHiddenList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1RowList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ScriptExtendedList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ScriptLanguageList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ScriptLocationList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ScriptTextList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1SecretEditList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1SelList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1SelTypeList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1SizeWithCellsList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1TextHAlignList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1TextVAlignList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1UIObjList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1VScrollList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1VTEditList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ValList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ValidIdsList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1VisibleList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1WidthMinList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2Accel2List', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2AccelList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2AnchorList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2AutoFillList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2AutoLineList', 
+ 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2AutoPictList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2AutoScaleList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2CFList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2CameraList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2CancelList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2CheckedList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ColHiddenList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ColoredList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ColumnList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DDEList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DefaultList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DefaultSizeList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DisabledList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DismissList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DropLinesList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DropStyleList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DxList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2FirstButtonList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2FmlaGroupList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2FmlaLinkList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2FmlaMacroList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2FmlaPictList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2FmlaRangeList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2FmlaTxbxList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2HelpList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2HorizList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2IncList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2JustLastXList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2LCTList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ListItemList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2LockTextList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2LockedList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2MapOCXList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2MaxList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2MinList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2MoveWithCellsList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2MultiLineList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2MultiSelList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2NoThreeD2List', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2NoThreeDList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2PageList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2PrintObjectList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2RecalcAlwaysList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2RowHiddenList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2RowList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ScriptExtendedList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ScriptLanguageList', + 
'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ScriptLocationList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ScriptTextList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2SecretEditList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2SelList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2SelTypeList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2SizeWithCellsList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2TextHAlignList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2TextVAlignList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2UIObjList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2VScrollList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2VTEditList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ValList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ValidIdsList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2VisibleList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2WidthMinList', + 'com.microsoft.schemas.office.office.CTCallout', + 'com.microsoft.schemas.office.office.CTClipPath', + 'com.microsoft.schemas.office.office.CTComplex', + 'com.microsoft.schemas.office.office.CTDiagram', + 'com.microsoft.schemas.office.office.CTExtrusion', + 'com.microsoft.schemas.office.office.CTFill', + 'com.microsoft.schemas.office.office.CTInk', + 'com.microsoft.schemas.office.office.CTRegroupTable', + 'com.microsoft.schemas.office.office.CTRules', + 'com.microsoft.schemas.office.office.CTSignatureLine', + 'com.microsoft.schemas.office.office.CTSkew', + 'com.microsoft.schemas.office.office.CTStrokeChild', + 'com.microsoft.schemas.office.office.STBWMode', + 'com.microsoft.schemas.office.office.STConnectorType', + 'com.microsoft.schemas.office.office.STHrAlign', + 'com.microsoft.schemas.office.office.STRelationshipId', + 'com.microsoft.schemas.office.office.STTrueFalse', + 'com.microsoft.schemas.office.office.STTrueFalseBlank', + 'com.microsoft.schemas.office.powerpoint.CTEmpty', + 'com.microsoft.schemas.office.powerpoint.CTRel', + 'com.microsoft.schemas.office.visio.x2012.main.AttachedToolbarsType', + 'com.microsoft.schemas.office.visio.x2012.main.ColorsType', + 'com.microsoft.schemas.office.visio.x2012.main.ConnectType', + 'com.microsoft.schemas.office.visio.x2012.main.ConnectsType', + 'com.microsoft.schemas.office.visio.x2012.main.CpType', + 'com.microsoft.schemas.office.visio.x2012.main.CustomMenusFileType', + 'com.microsoft.schemas.office.visio.x2012.main.CustomToolbarsFileType', + 'com.microsoft.schemas.office.visio.x2012.main.DataType', + 'com.microsoft.schemas.office.visio.x2012.main.DocumentSheetType', + 'com.microsoft.schemas.office.visio.x2012.main.DynamicGridEnabledType', + 'com.microsoft.schemas.office.visio.x2012.main.EventListType', + 'com.microsoft.schemas.office.visio.x2012.main.FaceNamesType', + 'com.microsoft.schemas.office.visio.x2012.main.FldType', + 'com.microsoft.schemas.office.visio.x2012.main.ForeignDataType', + 'com.microsoft.schemas.office.visio.x2012.main.GlueSettingsType', + 'com.microsoft.schemas.office.visio.x2012.main.HeaderFooterType', + 'com.microsoft.schemas.office.visio.x2012.main.IconType', + 'com.microsoft.schemas.office.visio.x2012.main.MasterShortcutType', + 'com.microsoft.schemas.office.visio.x2012.main.PpType', + 'com.microsoft.schemas.office.visio.x2012.main.ProtectBkgndsType', + 
'com.microsoft.schemas.office.visio.x2012.main.ProtectMastersType', + 'com.microsoft.schemas.office.visio.x2012.main.ProtectShapesType', + 'com.microsoft.schemas.office.visio.x2012.main.ProtectStylesType', + 'com.microsoft.schemas.office.visio.x2012.main.PublishSettingsType', + 'com.microsoft.schemas.office.visio.x2012.main.RefByType', + 'com.microsoft.schemas.office.visio.x2012.main.SnapAnglesType', + 'com.microsoft.schemas.office.visio.x2012.main.SnapExtensionsType', + 'com.microsoft.schemas.office.visio.x2012.main.SnapSettingsType', + 'com.microsoft.schemas.office.visio.x2012.main.TpType', + 'com.microsoft.schemas.office.visio.x2012.main.TriggerType', + 'com.microsoft.schemas.office.visio.x2012.main.impl.CellTypeImpl$1RefByList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.MastersTypeImpl$1MasterList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.MastersTypeImpl$1MasterShortcutList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.PagesTypeImpl$1PageList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.RowTypeImpl$1CellList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.RowTypeImpl$1TriggerList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.SectionTypeImpl$1CellList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.SectionTypeImpl$1RowList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.SectionTypeImpl$1TriggerList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.ShapesTypeImpl$1ShapeList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.SheetTypeImpl$1CellList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.SheetTypeImpl$1SectionList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.SheetTypeImpl$1TriggerList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.StyleSheetsTypeImpl$1StyleSheetList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.TextTypeImpl$1CpList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.TextTypeImpl$1FldList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.TextTypeImpl$1PpList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.TextTypeImpl$1TpList', + 'com.microsoft.schemas.office.word.CTAnchorLock', + 'com.microsoft.schemas.office.word.CTBorder', + 'com.microsoft.schemas.office.word.CTWrap', 'com.microsoft.schemas.office.x2006.digsig.STPositiveInteger', 'com.microsoft.schemas.office.x2006.digsig.STSignatureComments', 'com.microsoft.schemas.office.x2006.digsig.STSignatureProviderUrl', @@ -149,14 +363,118 @@ thirdPartyAudit.excludes = [ 'com.microsoft.schemas.office.x2006.digsig.STSignatureType', 'com.microsoft.schemas.office.x2006.digsig.STUniqueIdentifierWithBraces', 'com.microsoft.schemas.office.x2006.digsig.STVersion', + 'com.microsoft.schemas.vml.CTArc', + 'com.microsoft.schemas.vml.CTCurve', + 'com.microsoft.schemas.vml.CTImage', + 'com.microsoft.schemas.vml.CTImageData', + 'com.microsoft.schemas.vml.CTLine', + 'com.microsoft.schemas.vml.CTOval', + 'com.microsoft.schemas.vml.CTPolyLine', + 'com.microsoft.schemas.vml.CTRect', + 'com.microsoft.schemas.vml.CTRoundRect', + 'com.microsoft.schemas.vml.STEditAs', + 'com.microsoft.schemas.vml.STFillMethod', + 'com.microsoft.schemas.vml.STFillType', + 'com.microsoft.schemas.vml.STImageAspect', + 'com.microsoft.schemas.vml.STShadowType', + 'com.microsoft.schemas.vml.STStrokeArrowLength', + 'com.microsoft.schemas.vml.STStrokeArrowType', + 'com.microsoft.schemas.vml.STStrokeArrowWidth', + 'com.microsoft.schemas.vml.STStrokeEndCap', + 'com.microsoft.schemas.vml.STStrokeLineStyle', + 
'com.microsoft.schemas.vml.STTrueFalseBlank', + 'com.microsoft.schemas.vml.impl.CTFormulasImpl$1FList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1AnchorlockList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1ArcList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1BorderbottomList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1BorderleftList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1BorderrightList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1BordertopList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1CalloutList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1ClientDataList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1ClippathList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1CurveList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1DiagramList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1ExtrusionList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1FillList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1FormulasList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1GroupList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1HandlesList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1ImageList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1ImagedataList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1LineList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1LockList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1OvalList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1PathList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1PolylineList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1RectList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1RoundrectList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1ShadowList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1ShapeList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1ShapetypeList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1SignaturelineList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1SkewList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1StrokeList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1TextboxList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1TextdataList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1TextpathList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1WrapList', + 'com.microsoft.schemas.vml.impl.CTHandlesImpl$1HList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1AnchorlockList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1BorderbottomList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1BorderleftList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1BorderrightList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1BordertopList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1CalloutList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1ClippathList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1ExtrusionList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1FillList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1FormulasList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1HandlesList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1ImagedataList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1InkList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1IscommentList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1LockList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1PathList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1ShadowList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1SignaturelineList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1SkewList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1StrokeList', + 
'com.microsoft.schemas.vml.impl.CTShapeImpl$1TextboxList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1TextdataList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1TextpathList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1WrapList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1AnchorlockList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1BorderbottomList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1BorderleftList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1BorderrightList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1BordertopList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1CalloutList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1ClientDataList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1ClippathList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1ExtrusionList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1FillList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1FormulasList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1HandlesList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1ImagedataList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1LockList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1PathList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1ShadowList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1SignaturelineList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1SkewList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1StrokeList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1TextboxList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1TextdataList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1TextpathList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1WrapList', 'com.pff.PSTAttachment', 'com.pff.PSTFile', 'com.pff.PSTFolder', 'com.pff.PSTMessage', - 'com.sun.syndication.feed.synd.SyndContent', - 'com.sun.syndication.feed.synd.SyndEntry', - 'com.sun.syndication.feed.synd.SyndFeed', - 'com.sun.syndication.io.SyndFeedInput', + 'com.rometools.rome.feed.synd.SyndContent', + 'com.rometools.rome.feed.synd.SyndEntry', + 'com.rometools.rome.feed.synd.SyndFeed', + 'com.rometools.rome.io.SyndFeedInput', 'com.uwyn.jhighlight.renderer.Renderer', 'com.uwyn.jhighlight.renderer.XhtmlRendererFactory', 'de.l3s.boilerpipe.BoilerpipeExtractor', @@ -202,6 +520,13 @@ thirdPartyAudit.excludes = [ 'org.apache.cxf.jaxrs.ext.multipart.Attachment', 'org.apache.cxf.jaxrs.ext.multipart.ContentDisposition', 'org.apache.cxf.jaxrs.ext.multipart.MultipartBody', + 'org.apache.http.HttpEntity', + 'org.apache.http.HttpResponse', + 'org.apache.http.StatusLine', + 'org.apache.http.client.HttpClient', + 'org.apache.http.client.methods.HttpGet', + 'org.apache.http.client.utils.URIBuilder', + 'org.apache.http.impl.client.DefaultHttpClient', 'org.apache.james.mime4j.MimeException', 'org.apache.james.mime4j.codec.DecodeMonitor', 'org.apache.james.mime4j.codec.DecoderUtil', @@ -227,6 +552,7 @@ thirdPartyAudit.excludes = [ 'org.apache.jcp.xml.dsig.internal.dom.DOMSignedInfo', 'org.apache.log.Hierarchy', 'org.apache.log.Logger', + 'org.apache.pdfbox.tools.imageio.ImageIOUtil', 'org.apache.sis.internal.util.CheckedArrayList', 'org.apache.sis.internal.util.CheckedHashSet', 'org.apache.sis.metadata.iso.DefaultMetadata', @@ -263,7 +589,6 @@ thirdPartyAudit.excludes = [ 'org.apache.xml.security.Init', 'org.apache.xml.security.c14n.Canonicalizer', 'org.apache.xml.security.utils.Base64', - 'org.bouncycastle.asn1.DERObject', 'org.etsi.uri.x01903.v13.AnyType', 
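+// Editorial note, not part of the original patch: every entry in
+// thirdPartyAudit.excludes whitelists a class that one of the bundled
+// third-party jars references but that is deliberately left off the plugin
+// classpath; the audit task would otherwise fail the build on the dangling
+// reference. Loading a listed class at runtime fails accordingly. A minimal
+// Java sketch, using one of the entries added just above:
+//
+//     try {
+//         Class.forName("org.apache.http.client.HttpClient"); // excluded above
+//     } catch (ClassNotFoundException expected) {
+//         // httpclient is not shipped with the plugin, so the optional
+//         // Tika code paths that would need it are never exercised
+//     }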
'org.etsi.uri.x01903.v13.ClaimedRolesListType', 'org.etsi.uri.x01903.v13.CounterSignatureType', @@ -312,7 +637,7 @@ thirdPartyAudit.excludes = [ 'org.json.XML', 'org.json.simple.JSONArray', 'org.json.simple.JSONObject', - 'org.json.simple.JSONValue', + 'org.json.simple.parser.JSONParser', 'org.junit.Test', 'org.junit.internal.TextListener', 'org.junit.runner.JUnitCore', @@ -812,8 +1137,6 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTCommentAuthorListImpl$1CmAuthorList', 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTCommentListImpl$1CmList', 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTCustomerDataListImpl$1CustDataList', - 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTGroupShapeImpl$1GraphicFrameList', - 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTGroupShapeImpl$1PicList', 'org.openxmlformats.schemas.schemaLibrary.x2006.main.CTSchemaLibrary', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTAutoSortScope', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTBoolean', @@ -853,7 +1176,6 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFormats', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFunctionGroups', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTGradientFill', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTIgnoredErrors', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTMeasureDimensionMaps', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTMeasureGroups', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTMissing', @@ -878,7 +1200,6 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSortState', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTString', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTTableFormula', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTTableStyleInfo', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTTableStyles', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTTupleCache', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTWebPublishItems', @@ -908,7 +1229,6 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTBordersImpl$1BorderList', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCacheFieldImpl$1MpMapList', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCacheFieldsImpl$1CacheFieldList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCalcChainImpl$1CList', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCellStyleXfsImpl$1XfList', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCellXfsImpl$1XfList', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCfRuleImpl$1FormulaList', @@ -916,7 +1236,6 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColFieldsImpl$1FieldList', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColorScaleImpl$1CfvoList', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColorScaleImpl$1ColorList', - 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColsImpl$1ColList', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCommentListImpl$1CommentList', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTConditionalFormattingImpl$1CfRuleList', 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTDataBarImpl$1CfvoList', @@ -1023,7 +1342,6 @@ thirdPartyAudit.excludes 
= [ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFramePr', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFtnDocProps', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFtnProps', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTHighlight', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTKinsoku', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLevelSuffix', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLineNumber', @@ -1265,7 +1583,6 @@ thirdPartyAudit.excludes = [ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumberingImpl$1NumList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumberingImpl$1NumPicBulletList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1BookmarkStartList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CommentRangeEndList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CommentRangeStartList', 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlDelRangeEndList', @@ -1696,6 +2013,8 @@ thirdPartyAudit.excludes = [ 'org.osgi.framework.ServiceRegistration', 'org.osgi.util.tracker.ServiceTracker', 'org.osgi.util.tracker.ServiceTrackerCustomizer', + 'org.slf4j.Logger', + 'org.slf4j.LoggerFactory', 'org.sqlite.SQLiteConfig', 'org.tukaani.xz.ARMOptions', 'org.tukaani.xz.ARMThumbOptions', @@ -1722,273 +2041,11 @@ thirdPartyAudit.excludes = [ 'org.w3.x2000.x09.xmldsig.impl.SignedInfoTypeImpl$1ReferenceList', 'org.w3.x2000.x09.xmldsig.impl.TransformTypeImpl$1XPathList', 'org.w3.x2000.x09.xmldsig.impl.TransformTypeImpl$2XPathList', - 'schemasMicrosoftComOfficeExcel.STCF', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1Accel2List', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AccelList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AnchorList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AutoFillList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AutoLineList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AutoPictList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AutoScaleList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1CFList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1CameraList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1CancelList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1CheckedList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ColHiddenList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ColoredList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ColumnList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DDEList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DefaultList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DefaultSizeList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DisabledList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DismissList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DropLinesList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DropStyleList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DxList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FirstButtonList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaGroupList', - 
'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaLinkList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaMacroList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaPictList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaRangeList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaTxbxList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1HelpList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1HorizList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1IncList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1JustLastXList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1LCTList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ListItemList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1LockTextList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1LockedList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MapOCXList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MaxList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MinList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MoveWithCellsList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MultiLineList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MultiSelList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1NoThreeD2List', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1NoThreeDList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1PageList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1PrintObjectList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1RecalcAlwaysList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1RowHiddenList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1RowList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ScriptExtendedList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ScriptLanguageList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ScriptLocationList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ScriptTextList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1SecretEditList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1SelList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1SelTypeList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1SizeWithCellsList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1TextHAlignList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1TextVAlignList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1UIObjList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1VScrollList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1VTEditList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ValList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ValidIdsList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1VisibleList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1WidthMinList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2Accel2List', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AccelList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AnchorList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AutoFillList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AutoLineList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AutoPictList', - 
'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AutoScaleList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2CFList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2CameraList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2CancelList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2CheckedList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ColHiddenList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ColoredList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ColumnList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DDEList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DefaultList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DefaultSizeList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DisabledList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DismissList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DropLinesList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DropStyleList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DxList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FirstButtonList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaGroupList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaLinkList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaMacroList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaPictList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaRangeList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaTxbxList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2HelpList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2HorizList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2IncList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2JustLastXList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2LCTList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ListItemList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2LockTextList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2LockedList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MapOCXList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MaxList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MinList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MoveWithCellsList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MultiLineList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MultiSelList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2NoThreeD2List', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2NoThreeDList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2PageList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2PrintObjectList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2RecalcAlwaysList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2RowHiddenList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2RowList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ScriptExtendedList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ScriptLanguageList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ScriptLocationList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ScriptTextList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2SecretEditList', - 
'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2SelList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2SelTypeList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2SizeWithCellsList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2TextHAlignList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2TextVAlignList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2UIObjList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2VScrollList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2VTEditList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ValList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ValidIdsList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2VisibleList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2WidthMinList', - 'schemasMicrosoftComOfficeOffice.CTCallout', - 'schemasMicrosoftComOfficeOffice.CTClipPath', - 'schemasMicrosoftComOfficeOffice.CTComplex', - 'schemasMicrosoftComOfficeOffice.CTDiagram', - 'schemasMicrosoftComOfficeOffice.CTExtrusion', - 'schemasMicrosoftComOfficeOffice.CTFill', - 'schemasMicrosoftComOfficeOffice.CTInk', - 'schemasMicrosoftComOfficeOffice.CTRegroupTable', - 'schemasMicrosoftComOfficeOffice.CTRules', - 'schemasMicrosoftComOfficeOffice.CTSignatureLine', - 'schemasMicrosoftComOfficeOffice.CTSkew', - 'schemasMicrosoftComOfficeOffice.CTStrokeChild', - 'schemasMicrosoftComOfficeOffice.STBWMode', - 'schemasMicrosoftComOfficeOffice.STConnectorType', - 'schemasMicrosoftComOfficeOffice.STHrAlign', - 'schemasMicrosoftComOfficeOffice.STRelationshipId', - 'schemasMicrosoftComOfficeOffice.STTrueFalse', - 'schemasMicrosoftComOfficeOffice.STTrueFalseBlank', - 'schemasMicrosoftComOfficePowerpoint.CTEmpty', - 'schemasMicrosoftComOfficePowerpoint.CTRel', - 'schemasMicrosoftComOfficeWord.CTAnchorLock', - 'schemasMicrosoftComOfficeWord.CTBorder', - 'schemasMicrosoftComOfficeWord.CTWrap', - 'schemasMicrosoftComVml.CTArc', - 'schemasMicrosoftComVml.CTCurve', - 'schemasMicrosoftComVml.CTImage', - 'schemasMicrosoftComVml.CTImageData', - 'schemasMicrosoftComVml.CTLine', - 'schemasMicrosoftComVml.CTOval', - 'schemasMicrosoftComVml.CTPolyLine', - 'schemasMicrosoftComVml.CTRect', - 'schemasMicrosoftComVml.CTRoundRect', - 'schemasMicrosoftComVml.STEditAs', - 'schemasMicrosoftComVml.STFillMethod', - 'schemasMicrosoftComVml.STFillType', - 'schemasMicrosoftComVml.STImageAspect', - 'schemasMicrosoftComVml.STShadowType', - 'schemasMicrosoftComVml.STStrokeArrowLength', - 'schemasMicrosoftComVml.STStrokeArrowType', - 'schemasMicrosoftComVml.STStrokeArrowWidth', - 'schemasMicrosoftComVml.STStrokeEndCap', - 'schemasMicrosoftComVml.STStrokeLineStyle', - 'schemasMicrosoftComVml.STTrueFalseBlank', - 'schemasMicrosoftComVml.impl.CTFormulasImpl$1FList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1AnchorlockList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1ArcList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1BorderbottomList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1BorderleftList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1BorderrightList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1BordertopList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1CalloutList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1ClientDataList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1ClippathList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1CurveList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1DiagramList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1ExtrusionList', - 
'schemasMicrosoftComVml.impl.CTGroupImpl$1FillList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1FormulasList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1GroupList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1HandlesList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1ImageList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1ImagedataList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1LineList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1LockList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1OvalList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1PathList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1PolylineList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1RectList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1RoundrectList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1ShadowList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1ShapeList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1ShapetypeList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1SignaturelineList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1SkewList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1StrokeList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1TextboxList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1TextdataList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1TextpathList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1WrapList', - 'schemasMicrosoftComVml.impl.CTHandlesImpl$1HList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1AnchorlockList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1BorderbottomList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1BorderleftList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1BorderrightList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1BordertopList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1CalloutList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1ClippathList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1ExtrusionList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1FillList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1FormulasList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1HandlesList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1ImagedataList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1InkList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1IscommentList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1LockList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1PathList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1ShadowList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1SignaturelineList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1SkewList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1StrokeList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1TextboxList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1TextdataList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1TextpathList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1WrapList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1AnchorlockList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1BorderbottomList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1BorderleftList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1BorderrightList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1BordertopList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1CalloutList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1ClientDataList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1ClippathList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1ExtrusionList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1FillList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1FormulasList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1HandlesList', - 
'schemasMicrosoftComVml.impl.CTShapetypeImpl$1ImagedataList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1LockList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1PathList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1ShadowList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1SignaturelineList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1SkewList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1StrokeList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1TextboxList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1TextdataList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1TextpathList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1WrapList', 'ucar.ma2.DataType', 'ucar.nc2.Attribute', 'ucar.nc2.Dimension', 'ucar.nc2.Group', 'ucar.nc2.NetcdfFile', 'ucar.nc2.Variable', - 'ucar.nc2.dataset.NetcdfDataset', + 'ucar.nc2.dataset.NetcdfDataset' ] diff --git a/plugins/ingest-attachment/licenses/bcmail-jdk15on-1.52.jar.sha1 b/plugins/ingest-attachment/licenses/bcmail-jdk15on-1.52.jar.sha1 deleted file mode 100644 index de084c948f4..00000000000 --- a/plugins/ingest-attachment/licenses/bcmail-jdk15on-1.52.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4995a870400e1554d1c7ed2afcb5d198fae12db9 diff --git a/plugins/ingest-attachment/licenses/bcmail-jdk15on-1.54.jar.sha1 b/plugins/ingest-attachment/licenses/bcmail-jdk15on-1.54.jar.sha1 new file mode 100644 index 00000000000..79da45c5c42 --- /dev/null +++ b/plugins/ingest-attachment/licenses/bcmail-jdk15on-1.54.jar.sha1 @@ -0,0 +1 @@ +9d9b5432b4b29ef4a853223bc6e19379ef116cca \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/bcpkix-jdk15on-1.52.jar.sha1 b/plugins/ingest-attachment/licenses/bcpkix-jdk15on-1.52.jar.sha1 deleted file mode 100644 index 489ceeaaf36..00000000000 --- a/plugins/ingest-attachment/licenses/bcpkix-jdk15on-1.52.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b8ffac2bbc6626f86909589c8cc63637cc936504 diff --git a/plugins/ingest-attachment/licenses/bcpkix-jdk15on-1.54.jar.sha1 b/plugins/ingest-attachment/licenses/bcpkix-jdk15on-1.54.jar.sha1 new file mode 100644 index 00000000000..2d0c3cf4e27 --- /dev/null +++ b/plugins/ingest-attachment/licenses/bcpkix-jdk15on-1.54.jar.sha1 @@ -0,0 +1 @@ +b11bfee99bb11eea344de6e4a07fe89212c55c02 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/bcprov-jdk15on-1.52.jar.sha1 b/plugins/ingest-attachment/licenses/bcprov-jdk15on-1.52.jar.sha1 deleted file mode 100644 index 14ecc1be40b..00000000000 --- a/plugins/ingest-attachment/licenses/bcprov-jdk15on-1.52.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -88a941faf9819d371e3174b5ed56a3f3f7d73269 diff --git a/plugins/ingest-attachment/licenses/bcprov-jdk15on-1.54.jar.sha1 b/plugins/ingest-attachment/licenses/bcprov-jdk15on-1.54.jar.sha1 new file mode 100644 index 00000000000..fcda646b42a --- /dev/null +++ b/plugins/ingest-attachment/licenses/bcprov-jdk15on-1.54.jar.sha1 @@ -0,0 +1 @@ +1acdedeb89f1d950d67b73d481eb7736df65eedb \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/fontbox-1.8.10.jar.sha1 b/plugins/ingest-attachment/licenses/fontbox-1.8.10.jar.sha1 deleted file mode 100644 index ce7f9f5d49c..00000000000 --- a/plugins/ingest-attachment/licenses/fontbox-1.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -41776c7713e3f3a1ce688bd96459fc597298c340 diff --git a/plugins/ingest-attachment/licenses/fontbox-2.0.1.jar.sha1 b/plugins/ingest-attachment/licenses/fontbox-2.0.1.jar.sha1 new file mode 100644 index 00000000000..0668199b242 --- /dev/null +++ 
b/plugins/ingest-attachment/licenses/fontbox-2.0.1.jar.sha1 @@ -0,0 +1 @@ +b9d4f0993e015f3f1ce0be9e7300cf62dd7a7f15 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/jempbox-1.8.10.jar.sha1 b/plugins/ingest-attachment/licenses/jempbox-1.8.10.jar.sha1 deleted file mode 100644 index 5a7b1997208..00000000000 --- a/plugins/ingest-attachment/licenses/jempbox-1.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -40df4e4ca884aadc20b82d5abd0a3679774c55a6 diff --git a/plugins/ingest-attachment/licenses/jempbox-1.8.12.jar.sha1 b/plugins/ingest-attachment/licenses/jempbox-1.8.12.jar.sha1 new file mode 100644 index 00000000000..0e3dcf4573b --- /dev/null +++ b/plugins/ingest-attachment/licenses/jempbox-1.8.12.jar.sha1 @@ -0,0 +1 @@ +426450c573c19f6f2c751a7a52c11931b712c9f6 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/pdfbox-1.8.10.jar.sha1 b/plugins/ingest-attachment/licenses/pdfbox-1.8.10.jar.sha1 deleted file mode 100644 index 98ce1f9d98c..00000000000 --- a/plugins/ingest-attachment/licenses/pdfbox-1.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bc5d1254495be36d0a3b3d6c35f88d05200b9311 diff --git a/plugins/ingest-attachment/licenses/pdfbox-2.0.1.jar.sha1 b/plugins/ingest-attachment/licenses/pdfbox-2.0.1.jar.sha1 new file mode 100644 index 00000000000..1014db34044 --- /dev/null +++ b/plugins/ingest-attachment/licenses/pdfbox-2.0.1.jar.sha1 @@ -0,0 +1 @@ +dbc69649118b7eff278f228c070a40ee559e1f62 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/poi-3.13.jar.sha1 b/plugins/ingest-attachment/licenses/poi-3.13.jar.sha1 deleted file mode 100644 index 09063c1e5e0..00000000000 --- a/plugins/ingest-attachment/licenses/poi-3.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0f59f504ba8c521e61e25f417ec652fd485010f3 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/poi-3.15-beta1.jar.sha1 b/plugins/ingest-attachment/licenses/poi-3.15-beta1.jar.sha1 new file mode 100644 index 00000000000..6049604dd97 --- /dev/null +++ b/plugins/ingest-attachment/licenses/poi-3.15-beta1.jar.sha1 @@ -0,0 +1 @@ +048bb8326b81323631d9ceb4236cfbd382e56da2 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/poi-ooxml-3.13.jar.sha1 b/plugins/ingest-attachment/licenses/poi-ooxml-3.13.jar.sha1 deleted file mode 100644 index 16784299855..00000000000 --- a/plugins/ingest-attachment/licenses/poi-ooxml-3.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c364a8f5422d613e3a56db3b4b889f2989d7ee73 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/poi-ooxml-3.15-beta1.jar.sha1 b/plugins/ingest-attachment/licenses/poi-ooxml-3.15-beta1.jar.sha1 new file mode 100644 index 00000000000..c3cf49d9246 --- /dev/null +++ b/plugins/ingest-attachment/licenses/poi-ooxml-3.15-beta1.jar.sha1 @@ -0,0 +1 @@ +81085a47fdf0d74d473d605c6b3784e26731842e \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/poi-ooxml-schemas-3.13.jar.sha1 b/plugins/ingest-attachment/licenses/poi-ooxml-schemas-3.13.jar.sha1 deleted file mode 100644 index b5a3a05c489..00000000000 --- a/plugins/ingest-attachment/licenses/poi-ooxml-schemas-3.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -56fb0b9f3ffc3d7f7fc9b59e17b5fa2c3ab921e7 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/poi-ooxml-schemas-3.15-beta1.jar.sha1 b/plugins/ingest-attachment/licenses/poi-ooxml-schemas-3.15-beta1.jar.sha1 new file mode 100644 index 00000000000..afd3b676d08 --- /dev/null +++ 
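Editorial note, not part of the original patch: every bundled jar is tracked by a
<artifact>-<version>.jar.sha1 file under licenses/ whose single line is the
lowercase hex SHA-1 of the jar, which is why each dependency bump in this patch
deletes one checksum file and adds another. A minimal Java sketch that prints
such a line (hypothetical helper, not part of the build):

    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.security.MessageDigest;

    public class Sha1File {
        public static void main(String[] args) throws Exception {
            byte[] jar = Files.readAllBytes(Paths.get(args[0]));
            StringBuilder hex = new StringBuilder();
            for (byte b : MessageDigest.getInstance("SHA-1").digest(jar)) {
                hex.append(String.format("%02x", b)); // two lowercase hex digits per byte
            }
            System.out.println(hex); // the single line stored in the .jar.sha1 file
        }
    }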
b/plugins/ingest-attachment/licenses/poi-ooxml-schemas-3.15-beta1.jar.sha1 @@ -0,0 +1 @@ +f8bc979ad79908a99483337f1ca2edf78558ac20 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/poi-scratchpad-3.13.jar.sha1 b/plugins/ingest-attachment/licenses/poi-scratchpad-3.13.jar.sha1 deleted file mode 100644 index cc61780e2a5..00000000000 --- a/plugins/ingest-attachment/licenses/poi-scratchpad-3.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -09d763275e6c7fa05d47e2581606748669e88c55 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/poi-scratchpad-3.15-beta1.jar.sha1 b/plugins/ingest-attachment/licenses/poi-scratchpad-3.15-beta1.jar.sha1 new file mode 100644 index 00000000000..7056a9fa49e --- /dev/null +++ b/plugins/ingest-attachment/licenses/poi-scratchpad-3.15-beta1.jar.sha1 @@ -0,0 +1 @@ +f4e276aaf97a60a1156388c9e38069122b7ea914 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-core-1.11.jar.sha1 b/plugins/ingest-attachment/licenses/tika-core-1.11.jar.sha1 deleted file mode 100644 index a6dfd778a9c..00000000000 --- a/plugins/ingest-attachment/licenses/tika-core-1.11.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d37a6b9080c8361e47b2050f69833fd61501ede9 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-core-1.13.jar.sha1 b/plugins/ingest-attachment/licenses/tika-core-1.13.jar.sha1 new file mode 100644 index 00000000000..cfc36a450bd --- /dev/null +++ b/plugins/ingest-attachment/licenses/tika-core-1.13.jar.sha1 @@ -0,0 +1 @@ +1305c798d41d1d7bbf12cb7c0ca184c98eed25ad \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-parsers-1.11.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parsers-1.11.jar.sha1 deleted file mode 100644 index fbbd59efaf9..00000000000 --- a/plugins/ingest-attachment/licenses/tika-parsers-1.11.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -355dc05d842ed223fc682da472229473ba706d68 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-parsers-1.13.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parsers-1.13.jar.sha1 new file mode 100644 index 00000000000..7fb2755d545 --- /dev/null +++ b/plugins/ingest-attachment/licenses/tika-parsers-1.13.jar.sha1 @@ -0,0 +1 @@ +374fde67b9d35f785534b0e6c4953533c31bab5f \ No newline at end of file diff --git a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java index 945c5d34790..56e72be351e 100644 --- a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java +++ b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.ingest.attachment; import org.apache.commons.io.IOUtils; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.Base64; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.core.IngestDocument; import org.elasticsearch.test.ESTestCase; @@ -30,6 +29,7 @@ import org.junit.Before; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; +import java.util.Base64; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; @@ -209,7 +209,7 @@ public class AttachmentProcessorTests extends ESTestCase { String path = "/org/elasticsearch/ingest/attachment/test/sample-files/" + filename; 
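// Editorial note on the change just below, not part of the original patch:
// the removed org.elasticsearch.common.Base64 helper is replaced by the JDK's
// java.util.Base64, available since Java 8. The plain encoder/decoder pair
// appears to be a drop-in replacement for the old encodeBytes(byte[]) here:
//
//     String encoded = Base64.getEncoder().encodeToString(bytes); // standard alphabet, no line breaks
//     byte[] roundTripped = Base64.getDecoder().decode(encoded);  // exact inverse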
try (InputStream is = AttachmentProcessorTests.class.getResourceAsStream(path)) { byte bytes[] = IOUtils.toByteArray(is); - return Base64.encodeBytes(bytes); + return Base64.getEncoder().encodeToString(bytes); } } } diff --git a/plugins/mapper-attachments/build.gradle b/plugins/mapper-attachments/build.gradle index 340832af6f8..b00f61867e9 100644 --- a/plugins/mapper-attachments/build.gradle +++ b/plugins/mapper-attachments/build.gradle @@ -23,10 +23,10 @@ esplugin { } versions << [ - 'tika': '1.11', - 'pdfbox': '1.8.10', - 'bouncycastle': '1.52', - 'poi': '3.13' + 'tika': '1.13', + 'pdfbox': '2.0.1', + 'bouncycastle': '1.54', + 'poi': '3.15-beta1' ] dependencies { @@ -44,7 +44,7 @@ dependencies { // Adobe PDF compile "org.apache.pdfbox:pdfbox:${versions.pdfbox}" compile "org.apache.pdfbox:fontbox:${versions.pdfbox}" - compile "org.apache.pdfbox:jempbox:${versions.pdfbox}" + compile "org.apache.pdfbox:jempbox:1.8.12" compile "commons-logging:commons-logging:${versions.commonslogging}" compile "org.bouncycastle:bcmail-jdk15on:${versions.bouncycastle}" compile "org.bouncycastle:bcprov-jdk15on:${versions.bouncycastle}" @@ -61,6 +61,9 @@ dependencies { compile 'org.apache.commons:commons-compress:1.10' } +// TODO: stop using LanguageIdentifier... +compileJava.options.compilerArgs << "-Xlint:-deprecation" + forbiddenPatterns { exclude '**/*.docx' exclude '**/*.pdf' @@ -69,1926 +72,1980 @@ forbiddenPatterns { thirdPartyAudit.excludes = [ // classes are missing: some due to our whitelisting of parsers - 'com.coremedia.iso.IsoFile', - 'com.coremedia.iso.boxes.Box', - 'com.coremedia.iso.boxes.Container', - 'com.coremedia.iso.boxes.FileTypeBox', - 'com.coremedia.iso.boxes.MetaBox', - 'com.coremedia.iso.boxes.MovieBox', - 'com.coremedia.iso.boxes.MovieHeaderBox', - 'com.coremedia.iso.boxes.SampleTableBox', - 'com.coremedia.iso.boxes.TrackBox', - 'com.coremedia.iso.boxes.TrackHeaderBox', - 'com.coremedia.iso.boxes.UserDataBox', - 'com.coremedia.iso.boxes.apple.AppleItemListBox', - 'com.coremedia.iso.boxes.sampleentry.AudioSampleEntry', - 'com.drew.imaging.jpeg.JpegMetadataReader', - 'com.drew.imaging.tiff.TiffMetadataReader', - 'com.drew.imaging.webp.WebpMetadataReader', - 'com.drew.lang.ByteArrayReader', - 'com.drew.lang.GeoLocation', - 'com.drew.lang.Rational', - 'com.drew.metadata.Directory', - 'com.drew.metadata.Metadata', - 'com.drew.metadata.Tag', - 'com.drew.metadata.exif.ExifIFD0Directory', - 'com.drew.metadata.exif.ExifReader', - 'com.drew.metadata.exif.ExifSubIFDDirectory', - 'com.drew.metadata.exif.ExifThumbnailDirectory', - 'com.drew.metadata.exif.GpsDirectory', - 'com.drew.metadata.iptc.IptcDirectory', - 'com.drew.metadata.jpeg.JpegCommentDirectory', - 'com.drew.metadata.jpeg.JpegDirectory', - 'com.drew.metadata.xmp.XmpReader', - 'com.github.junrar.Archive', - 'com.github.junrar.rarfile.FileHeader', - 'com.googlecode.mp4parser.DataSource', - 'com.googlecode.mp4parser.boxes.apple.AppleAlbumBox', - 'com.googlecode.mp4parser.boxes.apple.AppleArtist2Box', - 'com.googlecode.mp4parser.boxes.apple.AppleArtistBox', - 'com.googlecode.mp4parser.boxes.apple.AppleCommentBox', - 'com.googlecode.mp4parser.boxes.apple.AppleCompilationBox', - 'com.googlecode.mp4parser.boxes.apple.AppleDiskNumberBox', - 'com.googlecode.mp4parser.boxes.apple.AppleEncoderBox', - 'com.googlecode.mp4parser.boxes.apple.AppleGenreBox', - 'com.googlecode.mp4parser.boxes.apple.AppleNameBox', - 'com.googlecode.mp4parser.boxes.apple.AppleRecordingYear2Box', - 'com.googlecode.mp4parser.boxes.apple.AppleTrackAuthorBox', - 
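- // Editorial note, not part of the original patch: the Tika 1.13 bump above is
- // what makes the new "-Xlint:-deprecation" compiler flag necessary: the
- // LanguageIdentifier API that mapper-attachments still calls (see the TODO)
- // is deprecated there in favor of the newer tika-langdetect detectors.
- // Roughly, the deprecated call being kept for now looks like:
- //
- //     LanguageIdentifier identifier = new LanguageIdentifier(text);
- //     String language = identifier.getLanguage(); // e.g. "en"
- //
- // Note also that jempbox no longer tracks the PDFBox version: PDFBox 2.x
- // replaced it with xmpbox, so jempbox is pinned separately at 1.8.12 above.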
'com.googlecode.mp4parser.boxes.apple.AppleTrackNumberBox', - 'com.googlecode.mp4parser.boxes.apple.Utf8AppleDataBox', - 'com.googlecode.mp4parser.util.CastUtils', - 'com.healthmarketscience.jackcess.Column', - 'com.healthmarketscience.jackcess.CryptCodecProvider', - 'com.healthmarketscience.jackcess.DataType', - 'com.healthmarketscience.jackcess.Database', - 'com.healthmarketscience.jackcess.DatabaseBuilder', - 'com.healthmarketscience.jackcess.PropertyMap$Property', - 'com.healthmarketscience.jackcess.PropertyMap', - 'com.healthmarketscience.jackcess.Row', - 'com.healthmarketscience.jackcess.Table', - 'com.healthmarketscience.jackcess.query.Query', - 'com.healthmarketscience.jackcess.util.LinkResolver', - 'com.healthmarketscience.jackcess.util.OleBlob$CompoundContent', - 'com.healthmarketscience.jackcess.util.OleBlob$Content', - 'com.healthmarketscience.jackcess.util.OleBlob$ContentType', - 'com.healthmarketscience.jackcess.util.OleBlob$LinkContent', - 'com.healthmarketscience.jackcess.util.OleBlob$OtherContent', - 'com.healthmarketscience.jackcess.util.OleBlob$SimplePackageContent', - 'com.healthmarketscience.jackcess.util.OleBlob', - 'com.healthmarketscience.jackcess.util.TableIterableBuilder', - 'com.ibm.icu.text.Bidi', - 'com.ibm.icu.text.Normalizer', - 'com.jmatio.io.MatFileHeader', - 'com.jmatio.io.MatFileReader', - 'com.jmatio.types.MLArray', - 'com.jmatio.types.MLStructure', - 'com.microsoft.schemas.office.x2006.digsig.STPositiveInteger', - 'com.microsoft.schemas.office.x2006.digsig.STSignatureComments', - 'com.microsoft.schemas.office.x2006.digsig.STSignatureProviderUrl', - 'com.microsoft.schemas.office.x2006.digsig.STSignatureText', - 'com.microsoft.schemas.office.x2006.digsig.STSignatureType', - 'com.microsoft.schemas.office.x2006.digsig.STUniqueIdentifierWithBraces', - 'com.microsoft.schemas.office.x2006.digsig.STVersion', - 'com.pff.PSTAttachment', - 'com.pff.PSTFile', - 'com.pff.PSTFolder', - 'com.pff.PSTMessage', - 'com.sun.syndication.feed.synd.SyndContent', - 'com.sun.syndication.feed.synd.SyndEntry', - 'com.sun.syndication.feed.synd.SyndFeed', - 'com.sun.syndication.io.SyndFeedInput', - 'com.uwyn.jhighlight.renderer.Renderer', - 'com.uwyn.jhighlight.renderer.XhtmlRendererFactory', - 'de.l3s.boilerpipe.BoilerpipeExtractor', - 'de.l3s.boilerpipe.document.TextBlock', - 'de.l3s.boilerpipe.document.TextDocument', - 'de.l3s.boilerpipe.extractors.DefaultExtractor', - 'de.l3s.boilerpipe.sax.BoilerpipeHTMLContentHandler', - 'javax.mail.BodyPart', - 'javax.mail.Header', - 'javax.mail.Message$RecipientType', - 'javax.mail.MessagingException', - 'javax.mail.Multipart', - 'javax.mail.Part', - 'javax.mail.Session', - 'javax.mail.Transport', - 'javax.mail.internet.ContentType', - 'javax.mail.internet.InternetAddress', - 'javax.mail.internet.InternetHeaders', - 'javax.mail.internet.MimeBodyPart', - 'javax.mail.internet.MimeMessage', - 'javax.mail.internet.MimeMultipart', - 'javax.mail.internet.MimePart', - 'javax.mail.internet.SharedInputStream', - 'javax.servlet.ServletContextEvent', - 'javax.servlet.ServletContextListener', - 'javax.ws.rs.core.Response', - 'junit.framework.TestCase', - 'opennlp.tools.namefind.NameFinderME', - 'opennlp.tools.namefind.TokenNameFinderModel', - 'opennlp.tools.util.Span', - 'org.apache.avalon.framework.logger.Logger', - 'org.apache.commons.csv.CSVFormat', - 'org.apache.commons.csv.CSVParser', - 'org.apache.commons.csv.CSVRecord', - 'org.apache.commons.exec.CommandLine', - 'org.apache.commons.exec.DefaultExecutor', - 
'org.apache.commons.exec.ExecuteWatchdog', - 'org.apache.commons.exec.PumpStreamHandler', - 'org.apache.commons.exec.environment.EnvironmentUtils', - 'org.apache.ctakes.typesystem.type.refsem.UmlsConcept', - 'org.apache.ctakes.typesystem.type.textsem.IdentifiedAnnotation', - 'org.apache.cxf.jaxrs.client.WebClient', - 'org.apache.cxf.jaxrs.ext.multipart.Attachment', - 'org.apache.cxf.jaxrs.ext.multipart.ContentDisposition', - 'org.apache.cxf.jaxrs.ext.multipart.MultipartBody', - 'org.apache.james.mime4j.MimeException', - 'org.apache.james.mime4j.codec.DecodeMonitor', - 'org.apache.james.mime4j.codec.DecoderUtil', - 'org.apache.james.mime4j.dom.FieldParser', - 'org.apache.james.mime4j.dom.address.Address', - 'org.apache.james.mime4j.dom.address.AddressList', - 'org.apache.james.mime4j.dom.address.Mailbox', - 'org.apache.james.mime4j.dom.address.MailboxList', - 'org.apache.james.mime4j.dom.field.AddressListField', - 'org.apache.james.mime4j.dom.field.DateTimeField', - 'org.apache.james.mime4j.dom.field.MailboxListField', - 'org.apache.james.mime4j.dom.field.ParsedField', - 'org.apache.james.mime4j.dom.field.UnstructuredField', - 'org.apache.james.mime4j.field.LenientFieldParser', - 'org.apache.james.mime4j.parser.ContentHandler', - 'org.apache.james.mime4j.parser.MimeStreamParser', - 'org.apache.james.mime4j.stream.BodyDescriptor', - 'org.apache.james.mime4j.stream.Field', - 'org.apache.james.mime4j.stream.MimeConfig', - 'org.apache.jcp.xml.dsig.internal.dom.DOMDigestMethod', - 'org.apache.jcp.xml.dsig.internal.dom.DOMKeyInfo', - 'org.apache.jcp.xml.dsig.internal.dom.DOMReference', - 'org.apache.jcp.xml.dsig.internal.dom.DOMSignedInfo', - 'org.apache.log.Hierarchy', - 'org.apache.log.Logger', - 'org.apache.sis.internal.util.CheckedArrayList', - 'org.apache.sis.internal.util.CheckedHashSet', - 'org.apache.sis.metadata.iso.DefaultMetadata', - 'org.apache.sis.metadata.iso.DefaultMetadataScope', - 'org.apache.sis.metadata.iso.constraint.DefaultLegalConstraints', - 'org.apache.sis.metadata.iso.extent.DefaultGeographicBoundingBox', - 'org.apache.sis.metadata.iso.extent.DefaultGeographicDescription', - 'org.apache.sis.metadata.iso.identification.DefaultDataIdentification', - 'org.apache.sis.storage.DataStore', - 'org.apache.sis.storage.DataStores', - 'org.apache.sis.util.collection.CodeListSet', - 'org.apache.tools.ant.BuildException', - 'org.apache.tools.ant.FileScanner', - 'org.apache.tools.ant.Project', - 'org.apache.tools.ant.taskdefs.Jar', - 'org.apache.tools.ant.taskdefs.Javac', - 'org.apache.tools.ant.taskdefs.MatchingTask', - 'org.apache.tools.ant.types.FileSet', - 'org.apache.tools.ant.types.Path$PathElement', - 'org.apache.tools.ant.types.Path', - 'org.apache.tools.ant.types.Reference', - 'org.apache.uima.UIMAFramework', - 'org.apache.uima.analysis_engine.AnalysisEngine', - 'org.apache.uima.cas.Type', - 'org.apache.uima.cas.impl.XCASSerializer', - 'org.apache.uima.cas.impl.XmiCasSerializer', - 'org.apache.uima.cas.impl.XmiSerializationSharedData', - 'org.apache.uima.fit.util.JCasUtil', - 'org.apache.uima.jcas.JCas', - 'org.apache.uima.jcas.cas.FSArray', - 'org.apache.uima.util.XMLInputSource', - 'org.apache.uima.util.XMLParser', - 'org.apache.uima.util.XmlCasSerializer', - 'org.apache.xml.security.Init', - 'org.apache.xml.security.c14n.Canonicalizer', - 'org.apache.xml.security.utils.Base64', - 'org.bouncycastle.asn1.DERObject', - 'org.etsi.uri.x01903.v13.AnyType', - 'org.etsi.uri.x01903.v13.ClaimedRolesListType', - 'org.etsi.uri.x01903.v13.CounterSignatureType', - 
'org.etsi.uri.x01903.v13.DataObjectFormatType$Factory', - 'org.etsi.uri.x01903.v13.DataObjectFormatType', - 'org.etsi.uri.x01903.v13.IdentifierType', - 'org.etsi.uri.x01903.v13.IncludeType', - 'org.etsi.uri.x01903.v13.ObjectIdentifierType', - 'org.etsi.uri.x01903.v13.OtherCertStatusRefsType', - 'org.etsi.uri.x01903.v13.OtherCertStatusValuesType', - 'org.etsi.uri.x01903.v13.ReferenceInfoType', - 'org.etsi.uri.x01903.v13.SigPolicyQualifiersListType', - 'org.etsi.uri.x01903.v13.SignaturePolicyIdType', - 'org.etsi.uri.x01903.v13.SignatureProductionPlaceType', - 'org.etsi.uri.x01903.v13.SignedDataObjectPropertiesType', - 'org.etsi.uri.x01903.v13.SignerRoleType', - 'org.etsi.uri.x01903.v13.UnsignedDataObjectPropertiesType', - 'org.etsi.uri.x01903.v13.impl.CRLRefsTypeImpl$1CRLRefList', - 'org.etsi.uri.x01903.v13.impl.CRLValuesTypeImpl$1EncapsulatedCRLValueList', - 'org.etsi.uri.x01903.v13.impl.CertIDListTypeImpl$1CertList', - 'org.etsi.uri.x01903.v13.impl.CertificateValuesTypeImpl$1EncapsulatedX509CertificateList', - 'org.etsi.uri.x01903.v13.impl.CertificateValuesTypeImpl$1OtherCertificateList', - 'org.etsi.uri.x01903.v13.impl.GenericTimeStampTypeImpl$1EncapsulatedTimeStampList', - 'org.etsi.uri.x01903.v13.impl.GenericTimeStampTypeImpl$1IncludeList', - 'org.etsi.uri.x01903.v13.impl.GenericTimeStampTypeImpl$1ReferenceInfoList', - 'org.etsi.uri.x01903.v13.impl.GenericTimeStampTypeImpl$1XMLTimeStampList', - 'org.etsi.uri.x01903.v13.impl.OCSPRefsTypeImpl$1OCSPRefList', - 'org.etsi.uri.x01903.v13.impl.OCSPValuesTypeImpl$1EncapsulatedOCSPValueList', - 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1ArchiveTimeStampList', - 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1AttrAuthoritiesCertValuesList', - 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1AttributeCertificateRefsList', - 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1AttributeRevocationRefsList', - 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1AttributeRevocationValuesList', - 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1CertificateValuesList', - 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1CompleteCertificateRefsList', - 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1CompleteRevocationRefsList', - 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1CounterSignatureList', - 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1RefsOnlyTimeStampList', - 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1RevocationValuesList', - 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1SigAndRefsTimeStampList', - 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1SignatureTimeStampList', - 'org.etsi.uri.x01903.v14.ValidationDataType$Factory', - 'org.etsi.uri.x01903.v14.ValidationDataType', - 'org.json.JSONArray', - 'org.json.JSONObject', - 'org.json.XML', - 'org.json.simple.JSONArray', - 'org.json.simple.JSONObject', - 'org.json.simple.JSONValue', - 'org.junit.Test', - 'org.junit.internal.TextListener', - 'org.junit.runner.JUnitCore', + 'com.coremedia.iso.IsoFile', + 'com.coremedia.iso.boxes.Box', + 'com.coremedia.iso.boxes.Container', + 'com.coremedia.iso.boxes.FileTypeBox', + 'com.coremedia.iso.boxes.MetaBox', + 'com.coremedia.iso.boxes.MovieBox', + 'com.coremedia.iso.boxes.MovieHeaderBox', + 'com.coremedia.iso.boxes.SampleTableBox', + 'com.coremedia.iso.boxes.TrackBox', + 'com.coremedia.iso.boxes.TrackHeaderBox', 
+ 'com.coremedia.iso.boxes.UserDataBox', + 'com.coremedia.iso.boxes.apple.AppleItemListBox', + 'com.coremedia.iso.boxes.sampleentry.AudioSampleEntry', + 'com.drew.imaging.jpeg.JpegMetadataReader', + 'com.drew.imaging.tiff.TiffMetadataReader', + 'com.drew.imaging.webp.WebpMetadataReader', + 'com.drew.lang.ByteArrayReader', + 'com.drew.lang.GeoLocation', + 'com.drew.lang.Rational', + 'com.drew.metadata.Directory', + 'com.drew.metadata.Metadata', + 'com.drew.metadata.Tag', + 'com.drew.metadata.exif.ExifIFD0Directory', + 'com.drew.metadata.exif.ExifReader', + 'com.drew.metadata.exif.ExifSubIFDDirectory', + 'com.drew.metadata.exif.ExifThumbnailDirectory', + 'com.drew.metadata.exif.GpsDirectory', + 'com.drew.metadata.iptc.IptcDirectory', + 'com.drew.metadata.jpeg.JpegCommentDirectory', + 'com.drew.metadata.jpeg.JpegDirectory', + 'com.github.junrar.Archive', + 'com.github.junrar.rarfile.FileHeader', + 'com.google.common.reflect.TypeToken', + 'com.google.gson.Gson', + 'com.googlecode.mp4parser.DataSource', + 'com.googlecode.mp4parser.boxes.apple.AppleAlbumBox', + 'com.googlecode.mp4parser.boxes.apple.AppleArtist2Box', + 'com.googlecode.mp4parser.boxes.apple.AppleArtistBox', + 'com.googlecode.mp4parser.boxes.apple.AppleCommentBox', + 'com.googlecode.mp4parser.boxes.apple.AppleCompilationBox', + 'com.googlecode.mp4parser.boxes.apple.AppleDiskNumberBox', + 'com.googlecode.mp4parser.boxes.apple.AppleEncoderBox', + 'com.googlecode.mp4parser.boxes.apple.AppleGenreBox', + 'com.googlecode.mp4parser.boxes.apple.AppleNameBox', + 'com.googlecode.mp4parser.boxes.apple.AppleRecordingYear2Box', + 'com.googlecode.mp4parser.boxes.apple.AppleTrackAuthorBox', + 'com.googlecode.mp4parser.boxes.apple.AppleTrackNumberBox', + 'com.googlecode.mp4parser.boxes.apple.Utf8AppleDataBox', + 'com.googlecode.mp4parser.util.CastUtils', + 'com.graphbuilder.curve.ControlPath', + 'com.graphbuilder.curve.GroupIterator', + 'com.graphbuilder.curve.NURBSpline', + 'com.graphbuilder.curve.ShapeMultiPath', + 'com.graphbuilder.curve.ValueVector', + 'com.graphbuilder.geom.PointFactory', + 'com.healthmarketscience.jackcess.Column', + 'com.healthmarketscience.jackcess.CryptCodecProvider', + 'com.healthmarketscience.jackcess.DataType', + 'com.healthmarketscience.jackcess.Database', + 'com.healthmarketscience.jackcess.DatabaseBuilder', + 'com.healthmarketscience.jackcess.PropertyMap$Property', + 'com.healthmarketscience.jackcess.PropertyMap', + 'com.healthmarketscience.jackcess.Row', + 'com.healthmarketscience.jackcess.Table', + 'com.healthmarketscience.jackcess.query.Query', + 'com.healthmarketscience.jackcess.util.LinkResolver', + 'com.healthmarketscience.jackcess.util.OleBlob$CompoundContent', + 'com.healthmarketscience.jackcess.util.OleBlob$Content', + 'com.healthmarketscience.jackcess.util.OleBlob$ContentType', + 'com.healthmarketscience.jackcess.util.OleBlob$LinkContent', + 'com.healthmarketscience.jackcess.util.OleBlob$OtherContent', + 'com.healthmarketscience.jackcess.util.OleBlob$SimplePackageContent', + 'com.healthmarketscience.jackcess.util.OleBlob', + 'com.healthmarketscience.jackcess.util.TableIterableBuilder', + 'com.jmatio.io.MatFileHeader', + 'com.jmatio.io.MatFileReader', + 'com.jmatio.types.MLArray', + 'com.jmatio.types.MLStructure', + 'com.microsoft.schemas.office.excel.STCF', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1Accel2List', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1AccelList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1AnchorList', + 
'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1AutoFillList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1AutoLineList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1AutoPictList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1AutoScaleList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1CFList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1CameraList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1CancelList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1CheckedList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ColHiddenList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ColoredList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ColumnList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DDEList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DefaultList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DefaultSizeList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DisabledList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DismissList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DropLinesList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DropStyleList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1DxList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1FirstButtonList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1FmlaGroupList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1FmlaLinkList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1FmlaMacroList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1FmlaPictList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1FmlaRangeList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1FmlaTxbxList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1HelpList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1HorizList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1IncList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1JustLastXList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1LCTList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ListItemList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1LockTextList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1LockedList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1MapOCXList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1MaxList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1MinList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1MoveWithCellsList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1MultiLineList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1MultiSelList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1NoThreeD2List', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1NoThreeDList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1PageList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1PrintObjectList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1RecalcAlwaysList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1RowHiddenList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1RowList', + 
'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ScriptExtendedList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ScriptLanguageList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ScriptLocationList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ScriptTextList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1SecretEditList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1SelList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1SelTypeList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1SizeWithCellsList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1TextHAlignList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1TextVAlignList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1UIObjList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1VScrollList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1VTEditList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ValList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1ValidIdsList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1VisibleList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$1WidthMinList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2Accel2List', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2AccelList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2AnchorList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2AutoFillList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2AutoLineList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2AutoPictList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2AutoScaleList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2CFList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2CameraList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2CancelList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2CheckedList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ColHiddenList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ColoredList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ColumnList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DDEList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DefaultList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DefaultSizeList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DisabledList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DismissList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DropLinesList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DropStyleList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2DxList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2FirstButtonList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2FmlaGroupList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2FmlaLinkList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2FmlaMacroList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2FmlaPictList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2FmlaRangeList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2FmlaTxbxList', + 
'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2HelpList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2HorizList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2IncList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2JustLastXList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2LCTList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ListItemList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2LockTextList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2LockedList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2MapOCXList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2MaxList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2MinList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2MoveWithCellsList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2MultiLineList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2MultiSelList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2NoThreeD2List', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2NoThreeDList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2PageList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2PrintObjectList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2RecalcAlwaysList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2RowHiddenList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2RowList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ScriptExtendedList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ScriptLanguageList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ScriptLocationList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ScriptTextList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2SecretEditList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2SelList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2SelTypeList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2SizeWithCellsList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2TextHAlignList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2TextVAlignList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2UIObjList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2VScrollList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2VTEditList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ValList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2ValidIdsList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2VisibleList', + 'com.microsoft.schemas.office.excel.impl.CTClientDataImpl$2WidthMinList', + 'com.microsoft.schemas.office.office.CTCallout', + 'com.microsoft.schemas.office.office.CTClipPath', + 'com.microsoft.schemas.office.office.CTComplex', + 'com.microsoft.schemas.office.office.CTDiagram', + 'com.microsoft.schemas.office.office.CTExtrusion', + 'com.microsoft.schemas.office.office.CTFill', + 'com.microsoft.schemas.office.office.CTInk', + 'com.microsoft.schemas.office.office.CTRegroupTable', + 'com.microsoft.schemas.office.office.CTRules', + 'com.microsoft.schemas.office.office.CTSignatureLine', + 'com.microsoft.schemas.office.office.CTSkew', + 'com.microsoft.schemas.office.office.CTStrokeChild', + 'com.microsoft.schemas.office.office.STBWMode', + 
'com.microsoft.schemas.office.office.STConnectorType', + 'com.microsoft.schemas.office.office.STHrAlign', + 'com.microsoft.schemas.office.office.STRelationshipId', + 'com.microsoft.schemas.office.office.STTrueFalse', + 'com.microsoft.schemas.office.office.STTrueFalseBlank', + 'com.microsoft.schemas.office.powerpoint.CTEmpty', + 'com.microsoft.schemas.office.powerpoint.CTRel', + 'com.microsoft.schemas.office.visio.x2012.main.AttachedToolbarsType', + 'com.microsoft.schemas.office.visio.x2012.main.ColorsType', + 'com.microsoft.schemas.office.visio.x2012.main.ConnectType', + 'com.microsoft.schemas.office.visio.x2012.main.ConnectsType', + 'com.microsoft.schemas.office.visio.x2012.main.CpType', + 'com.microsoft.schemas.office.visio.x2012.main.CustomMenusFileType', + 'com.microsoft.schemas.office.visio.x2012.main.CustomToolbarsFileType', + 'com.microsoft.schemas.office.visio.x2012.main.DataType', + 'com.microsoft.schemas.office.visio.x2012.main.DocumentSheetType', + 'com.microsoft.schemas.office.visio.x2012.main.DynamicGridEnabledType', + 'com.microsoft.schemas.office.visio.x2012.main.EventListType', + 'com.microsoft.schemas.office.visio.x2012.main.FaceNamesType', + 'com.microsoft.schemas.office.visio.x2012.main.FldType', + 'com.microsoft.schemas.office.visio.x2012.main.ForeignDataType', + 'com.microsoft.schemas.office.visio.x2012.main.GlueSettingsType', + 'com.microsoft.schemas.office.visio.x2012.main.HeaderFooterType', + 'com.microsoft.schemas.office.visio.x2012.main.IconType', + 'com.microsoft.schemas.office.visio.x2012.main.MasterShortcutType', + 'com.microsoft.schemas.office.visio.x2012.main.PpType', + 'com.microsoft.schemas.office.visio.x2012.main.ProtectBkgndsType', + 'com.microsoft.schemas.office.visio.x2012.main.ProtectMastersType', + 'com.microsoft.schemas.office.visio.x2012.main.ProtectShapesType', + 'com.microsoft.schemas.office.visio.x2012.main.ProtectStylesType', + 'com.microsoft.schemas.office.visio.x2012.main.PublishSettingsType', + 'com.microsoft.schemas.office.visio.x2012.main.RefByType', + 'com.microsoft.schemas.office.visio.x2012.main.SnapAnglesType', + 'com.microsoft.schemas.office.visio.x2012.main.SnapExtensionsType', + 'com.microsoft.schemas.office.visio.x2012.main.SnapSettingsType', + 'com.microsoft.schemas.office.visio.x2012.main.TpType', + 'com.microsoft.schemas.office.visio.x2012.main.TriggerType', + 'com.microsoft.schemas.office.visio.x2012.main.impl.CellTypeImpl$1RefByList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.MastersTypeImpl$1MasterList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.MastersTypeImpl$1MasterShortcutList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.PagesTypeImpl$1PageList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.RowTypeImpl$1CellList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.RowTypeImpl$1TriggerList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.SectionTypeImpl$1CellList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.SectionTypeImpl$1RowList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.SectionTypeImpl$1TriggerList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.ShapesTypeImpl$1ShapeList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.SheetTypeImpl$1CellList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.SheetTypeImpl$1SectionList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.SheetTypeImpl$1TriggerList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.StyleSheetsTypeImpl$1StyleSheetList', + 
'com.microsoft.schemas.office.visio.x2012.main.impl.TextTypeImpl$1CpList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.TextTypeImpl$1FldList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.TextTypeImpl$1PpList', + 'com.microsoft.schemas.office.visio.x2012.main.impl.TextTypeImpl$1TpList', + 'com.microsoft.schemas.office.word.CTAnchorLock', + 'com.microsoft.schemas.office.word.CTBorder', + 'com.microsoft.schemas.office.word.CTWrap', + 'com.microsoft.schemas.office.x2006.digsig.STPositiveInteger', + 'com.microsoft.schemas.office.x2006.digsig.STSignatureComments', + 'com.microsoft.schemas.office.x2006.digsig.STSignatureProviderUrl', + 'com.microsoft.schemas.office.x2006.digsig.STSignatureText', + 'com.microsoft.schemas.office.x2006.digsig.STSignatureType', + 'com.microsoft.schemas.office.x2006.digsig.STUniqueIdentifierWithBraces', + 'com.microsoft.schemas.office.x2006.digsig.STVersion', + 'com.microsoft.schemas.vml.CTArc', + 'com.microsoft.schemas.vml.CTCurve', + 'com.microsoft.schemas.vml.CTImage', + 'com.microsoft.schemas.vml.CTImageData', + 'com.microsoft.schemas.vml.CTLine', + 'com.microsoft.schemas.vml.CTOval', + 'com.microsoft.schemas.vml.CTPolyLine', + 'com.microsoft.schemas.vml.CTRect', + 'com.microsoft.schemas.vml.CTRoundRect', + 'com.microsoft.schemas.vml.STEditAs', + 'com.microsoft.schemas.vml.STFillMethod', + 'com.microsoft.schemas.vml.STFillType', + 'com.microsoft.schemas.vml.STImageAspect', + 'com.microsoft.schemas.vml.STShadowType', + 'com.microsoft.schemas.vml.STStrokeArrowLength', + 'com.microsoft.schemas.vml.STStrokeArrowType', + 'com.microsoft.schemas.vml.STStrokeArrowWidth', + 'com.microsoft.schemas.vml.STStrokeEndCap', + 'com.microsoft.schemas.vml.STStrokeLineStyle', + 'com.microsoft.schemas.vml.STTrueFalseBlank', + 'com.microsoft.schemas.vml.impl.CTFormulasImpl$1FList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1AnchorlockList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1ArcList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1BorderbottomList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1BorderleftList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1BorderrightList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1BordertopList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1CalloutList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1ClientDataList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1ClippathList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1CurveList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1DiagramList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1ExtrusionList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1FillList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1FormulasList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1GroupList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1HandlesList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1ImageList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1ImagedataList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1LineList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1LockList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1OvalList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1PathList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1PolylineList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1RectList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1RoundrectList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1ShadowList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1ShapeList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1ShapetypeList', + 
'com.microsoft.schemas.vml.impl.CTGroupImpl$1SignaturelineList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1SkewList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1StrokeList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1TextboxList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1TextdataList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1TextpathList', + 'com.microsoft.schemas.vml.impl.CTGroupImpl$1WrapList', + 'com.microsoft.schemas.vml.impl.CTHandlesImpl$1HList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1AnchorlockList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1BorderbottomList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1BorderleftList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1BorderrightList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1BordertopList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1CalloutList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1ClippathList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1ExtrusionList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1FillList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1FormulasList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1HandlesList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1ImagedataList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1InkList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1IscommentList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1LockList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1PathList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1ShadowList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1SignaturelineList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1SkewList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1StrokeList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1TextboxList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1TextdataList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1TextpathList', + 'com.microsoft.schemas.vml.impl.CTShapeImpl$1WrapList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1AnchorlockList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1BorderbottomList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1BorderleftList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1BorderrightList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1BordertopList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1CalloutList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1ClientDataList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1ClippathList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1ExtrusionList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1FillList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1FormulasList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1HandlesList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1ImagedataList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1LockList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1PathList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1ShadowList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1SignaturelineList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1SkewList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1StrokeList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1TextboxList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1TextdataList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1TextpathList', + 'com.microsoft.schemas.vml.impl.CTShapetypeImpl$1WrapList', + 'com.pff.PSTAttachment', + 'com.pff.PSTFile', + 'com.pff.PSTFolder', + 
'com.pff.PSTMessage', + 'com.rometools.rome.feed.synd.SyndContent', + 'com.rometools.rome.feed.synd.SyndEntry', + 'com.rometools.rome.feed.synd.SyndFeed', + 'com.rometools.rome.io.SyndFeedInput', + 'com.uwyn.jhighlight.renderer.Renderer', + 'com.uwyn.jhighlight.renderer.XhtmlRendererFactory', + 'de.l3s.boilerpipe.BoilerpipeExtractor', + 'de.l3s.boilerpipe.document.TextBlock', + 'de.l3s.boilerpipe.document.TextDocument', + 'de.l3s.boilerpipe.extractors.DefaultExtractor', + 'de.l3s.boilerpipe.sax.BoilerpipeHTMLContentHandler', + 'javax.mail.BodyPart', + 'javax.mail.Header', + 'javax.mail.Message$RecipientType', + 'javax.mail.MessagingException', + 'javax.mail.Multipart', + 'javax.mail.Part', + 'javax.mail.Session', + 'javax.mail.Transport', + 'javax.mail.internet.ContentType', + 'javax.mail.internet.InternetAddress', + 'javax.mail.internet.InternetHeaders', + 'javax.mail.internet.MimeBodyPart', + 'javax.mail.internet.MimeMessage', + 'javax.mail.internet.MimeMultipart', + 'javax.mail.internet.MimePart', + 'javax.mail.internet.SharedInputStream', + 'javax.servlet.ServletContextEvent', + 'javax.servlet.ServletContextListener', + 'javax.ws.rs.core.Response', + 'junit.framework.TestCase', + 'opennlp.tools.namefind.NameFinderME', + 'opennlp.tools.namefind.TokenNameFinderModel', + 'opennlp.tools.util.Span', + 'org.apache.avalon.framework.logger.Logger', + 'org.apache.commons.csv.CSVFormat', + 'org.apache.commons.csv.CSVParser', + 'org.apache.commons.csv.CSVRecord', + 'org.apache.commons.exec.CommandLine', + 'org.apache.commons.exec.DefaultExecutor', + 'org.apache.commons.exec.ExecuteWatchdog', + 'org.apache.commons.exec.PumpStreamHandler', + 'org.apache.commons.exec.environment.EnvironmentUtils', + 'org.apache.ctakes.typesystem.type.refsem.UmlsConcept', + 'org.apache.ctakes.typesystem.type.textsem.IdentifiedAnnotation', + 'org.apache.cxf.jaxrs.client.WebClient', + 'org.apache.cxf.jaxrs.ext.multipart.Attachment', + 'org.apache.cxf.jaxrs.ext.multipart.ContentDisposition', + 'org.apache.cxf.jaxrs.ext.multipart.MultipartBody', + 'org.apache.http.HttpEntity', + 'org.apache.http.HttpResponse', + 'org.apache.http.StatusLine', + 'org.apache.http.client.HttpClient', + 'org.apache.http.client.methods.HttpGet', + 'org.apache.http.client.utils.URIBuilder', + 'org.apache.http.impl.client.DefaultHttpClient', + 'org.apache.james.mime4j.MimeException', + 'org.apache.james.mime4j.codec.DecodeMonitor', + 'org.apache.james.mime4j.codec.DecoderUtil', + 'org.apache.james.mime4j.dom.FieldParser', + 'org.apache.james.mime4j.dom.address.Address', + 'org.apache.james.mime4j.dom.address.AddressList', + 'org.apache.james.mime4j.dom.address.Mailbox', + 'org.apache.james.mime4j.dom.address.MailboxList', + 'org.apache.james.mime4j.dom.field.AddressListField', + 'org.apache.james.mime4j.dom.field.DateTimeField', + 'org.apache.james.mime4j.dom.field.MailboxListField', + 'org.apache.james.mime4j.dom.field.ParsedField', + 'org.apache.james.mime4j.dom.field.UnstructuredField', + 'org.apache.james.mime4j.field.LenientFieldParser', + 'org.apache.james.mime4j.parser.ContentHandler', + 'org.apache.james.mime4j.parser.MimeStreamParser', + 'org.apache.james.mime4j.stream.BodyDescriptor', + 'org.apache.james.mime4j.stream.Field', + 'org.apache.james.mime4j.stream.MimeConfig', + 'org.apache.jcp.xml.dsig.internal.dom.DOMDigestMethod', + 'org.apache.jcp.xml.dsig.internal.dom.DOMKeyInfo', + 'org.apache.jcp.xml.dsig.internal.dom.DOMReference', + 'org.apache.jcp.xml.dsig.internal.dom.DOMSignedInfo', + 'org.apache.log.Hierarchy', + 
'org.apache.log.Logger', + 'org.apache.pdfbox.tools.imageio.ImageIOUtil', + 'org.apache.sis.internal.util.CheckedArrayList', + 'org.apache.sis.internal.util.CheckedHashSet', + 'org.apache.sis.metadata.iso.DefaultMetadata', + 'org.apache.sis.metadata.iso.DefaultMetadataScope', + 'org.apache.sis.metadata.iso.constraint.DefaultLegalConstraints', + 'org.apache.sis.metadata.iso.extent.DefaultGeographicBoundingBox', + 'org.apache.sis.metadata.iso.extent.DefaultGeographicDescription', + 'org.apache.sis.metadata.iso.identification.DefaultDataIdentification', + 'org.apache.sis.storage.DataStore', + 'org.apache.sis.storage.DataStores', + 'org.apache.sis.util.collection.CodeListSet', + 'org.apache.tools.ant.BuildException', + 'org.apache.tools.ant.FileScanner', + 'org.apache.tools.ant.Project', + 'org.apache.tools.ant.taskdefs.Jar', + 'org.apache.tools.ant.taskdefs.Javac', + 'org.apache.tools.ant.taskdefs.MatchingTask', + 'org.apache.tools.ant.types.FileSet', + 'org.apache.tools.ant.types.Path$PathElement', + 'org.apache.tools.ant.types.Path', + 'org.apache.tools.ant.types.Reference', + 'org.apache.uima.UIMAFramework', + 'org.apache.uima.analysis_engine.AnalysisEngine', + 'org.apache.uima.cas.Type', + 'org.apache.uima.cas.impl.XCASSerializer', + 'org.apache.uima.cas.impl.XmiCasSerializer', + 'org.apache.uima.cas.impl.XmiSerializationSharedData', + 'org.apache.uima.fit.util.JCasUtil', + 'org.apache.uima.jcas.JCas', + 'org.apache.uima.jcas.cas.FSArray', + 'org.apache.uima.util.XMLInputSource', + 'org.apache.uima.util.XMLParser', + 'org.apache.uima.util.XmlCasSerializer', + 'org.apache.xml.security.Init', + 'org.apache.xml.security.c14n.Canonicalizer', + 'org.apache.xml.security.utils.Base64', + 'org.etsi.uri.x01903.v13.AnyType', + 'org.etsi.uri.x01903.v13.ClaimedRolesListType', + 'org.etsi.uri.x01903.v13.CounterSignatureType', + 'org.etsi.uri.x01903.v13.DataObjectFormatType$Factory', + 'org.etsi.uri.x01903.v13.DataObjectFormatType', + 'org.etsi.uri.x01903.v13.IdentifierType', + 'org.etsi.uri.x01903.v13.IncludeType', + 'org.etsi.uri.x01903.v13.ObjectIdentifierType', + 'org.etsi.uri.x01903.v13.OtherCertStatusRefsType', + 'org.etsi.uri.x01903.v13.OtherCertStatusValuesType', + 'org.etsi.uri.x01903.v13.ReferenceInfoType', + 'org.etsi.uri.x01903.v13.SigPolicyQualifiersListType', + 'org.etsi.uri.x01903.v13.SignaturePolicyIdType', + 'org.etsi.uri.x01903.v13.SignatureProductionPlaceType', + 'org.etsi.uri.x01903.v13.SignedDataObjectPropertiesType', + 'org.etsi.uri.x01903.v13.SignerRoleType', + 'org.etsi.uri.x01903.v13.UnsignedDataObjectPropertiesType', + 'org.etsi.uri.x01903.v13.impl.CRLRefsTypeImpl$1CRLRefList', + 'org.etsi.uri.x01903.v13.impl.CRLValuesTypeImpl$1EncapsulatedCRLValueList', + 'org.etsi.uri.x01903.v13.impl.CertIDListTypeImpl$1CertList', + 'org.etsi.uri.x01903.v13.impl.CertificateValuesTypeImpl$1EncapsulatedX509CertificateList', + 'org.etsi.uri.x01903.v13.impl.CertificateValuesTypeImpl$1OtherCertificateList', + 'org.etsi.uri.x01903.v13.impl.GenericTimeStampTypeImpl$1EncapsulatedTimeStampList', + 'org.etsi.uri.x01903.v13.impl.GenericTimeStampTypeImpl$1IncludeList', + 'org.etsi.uri.x01903.v13.impl.GenericTimeStampTypeImpl$1ReferenceInfoList', + 'org.etsi.uri.x01903.v13.impl.GenericTimeStampTypeImpl$1XMLTimeStampList', + 'org.etsi.uri.x01903.v13.impl.OCSPRefsTypeImpl$1OCSPRefList', + 'org.etsi.uri.x01903.v13.impl.OCSPValuesTypeImpl$1EncapsulatedOCSPValueList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1ArchiveTimeStampList', + 
'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1AttrAuthoritiesCertValuesList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1AttributeCertificateRefsList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1AttributeRevocationRefsList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1AttributeRevocationValuesList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1CertificateValuesList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1CompleteCertificateRefsList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1CompleteRevocationRefsList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1CounterSignatureList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1RefsOnlyTimeStampList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1RevocationValuesList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1SigAndRefsTimeStampList', + 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1SignatureTimeStampList', + 'org.etsi.uri.x01903.v14.ValidationDataType$Factory', + 'org.etsi.uri.x01903.v14.ValidationDataType', + 'org.json.JSONArray', + 'org.json.JSONObject', + 'org.json.XML', + 'org.json.simple.JSONArray', + 'org.json.simple.JSONObject', + 'org.json.simple.parser.JSONParser', + 'org.junit.Test', + 'org.junit.internal.TextListener', + 'org.junit.runner.JUnitCore', 'org.junit.runner.Result', - 'org.objectweb.asm.AnnotationVisitor', - 'org.objectweb.asm.Attribute', - 'org.objectweb.asm.ClassReader', - 'org.objectweb.asm.ClassVisitor', - 'org.objectweb.asm.FieldVisitor', - 'org.objectweb.asm.MethodVisitor', - 'org.objectweb.asm.Type', - 'org.opengis.metadata.Identifier', - 'org.opengis.metadata.citation.Address', - 'org.opengis.metadata.citation.Citation', - 'org.opengis.metadata.citation.CitationDate', - 'org.opengis.metadata.citation.Contact', - 'org.opengis.metadata.citation.DateType', - 'org.opengis.metadata.citation.OnLineFunction', - 'org.opengis.metadata.citation.OnlineResource', - 'org.opengis.metadata.citation.ResponsibleParty', - 'org.opengis.metadata.citation.Role', - 'org.opengis.metadata.constraint.Restriction', - 'org.opengis.metadata.distribution.DigitalTransferOptions', - 'org.opengis.metadata.distribution.Distribution', - 'org.opengis.metadata.distribution.Distributor', - 'org.opengis.metadata.distribution.Format', - 'org.opengis.metadata.extent.Extent', - 'org.opengis.metadata.identification.Identification', - 'org.opengis.metadata.identification.KeywordType', - 'org.opengis.metadata.identification.Keywords', - 'org.opengis.metadata.identification.Progress', - 'org.opengis.metadata.identification.TopicCategory', - 'org.opengis.metadata.maintenance.ScopeCode', - 'org.opengis.util.InternationalString', + 'org.objectweb.asm.AnnotationVisitor', + 'org.objectweb.asm.Attribute', + 'org.objectweb.asm.ClassReader', + 'org.objectweb.asm.ClassVisitor', + 'org.objectweb.asm.FieldVisitor', + 'org.objectweb.asm.MethodVisitor', + 'org.objectweb.asm.Type', + 'org.opengis.metadata.Identifier', + 'org.opengis.metadata.citation.Address', + 'org.opengis.metadata.citation.Citation', + 'org.opengis.metadata.citation.CitationDate', + 'org.opengis.metadata.citation.Contact', + 'org.opengis.metadata.citation.DateType', + 'org.opengis.metadata.citation.OnLineFunction', + 'org.opengis.metadata.citation.OnlineResource', + 'org.opengis.metadata.citation.ResponsibleParty', + 
'org.opengis.metadata.citation.Role', + 'org.opengis.metadata.constraint.Restriction', + 'org.opengis.metadata.distribution.DigitalTransferOptions', + 'org.opengis.metadata.distribution.Distribution', + 'org.opengis.metadata.distribution.Distributor', + 'org.opengis.metadata.distribution.Format', + 'org.opengis.metadata.extent.Extent', + 'org.opengis.metadata.identification.Identification', + 'org.opengis.metadata.identification.KeywordType', + 'org.opengis.metadata.identification.Keywords', + 'org.opengis.metadata.identification.Progress', + 'org.opengis.metadata.identification.TopicCategory', + 'org.opengis.metadata.maintenance.ScopeCode', + 'org.opengis.util.InternationalString', // Missing openxml schema classes are explained by the fact we use the smaller jar: - // "The full jar of all of the schemas is ooxml-schemas-xx.jar, and it is currently around 15mb. - // The smaller poi-ooxml-schemas jar is only about 4mb. + // "The full jar of all of the schemas is ooxml-schemas-xx.jar, and it is currently around 15mb. + // The smaller poi-ooxml-schemas jar is only about 4mb. // This latter jar file only contains the typically used parts though." // http://poi.apache.org/faq.html#faq-N10025 - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTArea3DChart', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTAreaChart', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTAxisUnit', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTBar3DChart', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTBarChart', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTBubbleChart', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTChartLines', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDLbls', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDPt', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDTable', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDateAx', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDispBlanksAs', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDispUnits', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDoughnutChart', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTErrBars', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTExtensionList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTExternalData', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTFirstSliceAng', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTGrouping', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTLblAlgn', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTLblOffset', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTLegendEntry', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTLine3DChart', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTMarkerSize', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTMultiLvlStrRef', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTOfPieChart', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTPie3DChart', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTPivotFmts', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTPivotSource', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTProtection', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTRadarChart', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTRelId', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSerAx', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSkip', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTStockChart', - 
'org.openxmlformats.schemas.drawingml.x2006.chart.CTStyle', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSurface', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSurface3DChart', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSurfaceChart', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTTextLanguageID', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTTrendline', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTUpDownBars', - 'org.openxmlformats.schemas.drawingml.x2006.chart.CTView3D', - 'org.openxmlformats.schemas.drawingml.x2006.chart.STPageSetupOrientation', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLegendImpl$1LegendEntryList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineChartImpl$1AxIdList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineChartImpl$1SerList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineSerImpl$1DPtList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineSerImpl$1TrendlineList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTNumDataImpl$1PtList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPieChartImpl$1SerList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPieSerImpl$1DPtList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Area3DChartList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1AreaChartList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Bar3DChartList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1BarChartList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1BubbleChartList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1CatAxList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1DateAxList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1DoughnutChartList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Line3DChartList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1LineChartList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1OfPieChartList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Pie3DChartList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1PieChartList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1RadarChartList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1ScatterChartList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1SerAxList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1StockChartList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Surface3DChartList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1SurfaceChartList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1ValAxList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterChartImpl$1AxIdList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterChartImpl$1SerList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterSerImpl$1DPtList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterSerImpl$1ErrBarsList', - 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterSerImpl$1TrendlineList', - 
'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTStrDataImpl$1PtList', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaBiLevelEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaCeilingEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaFloorEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaInverseEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaModulateEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaReplaceEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTAngle', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTAudioCD', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTAudioFile', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTBiLevelEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTBlurEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTCell3D', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTColorChangeEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTColorReplaceEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTColorSchemeList', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTComplementTransform', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTConnectionSite', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTConnectorLocking', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTCustomColorList', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTDashStopList', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTDuotoneEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTEffectContainer', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTEmbeddedWAVAudioFile', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTFillOverlayEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTFlatText', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTGammaTransform', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTGlowEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTGrayscaleEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTGrayscaleTransform', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTGroupFillProperties', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTGroupLocking', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTHSLEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTInnerShadowEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTInverseGammaTransform', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTInverseTransform', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTLineJoinBevel', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTLuminanceEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTObjectStyleDefaults', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTPath2DArcTo', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTPatternFillProperties', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTPolarAdjustHandle', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTPositiveFixedAngle', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTPresetShadowEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTPresetTextShape', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTQuickTimeFile', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTReflectionEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTScene3D', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTShape3D', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTShapeLocking', - 
'org.openxmlformats.schemas.drawingml.x2006.main.CTSoftEdgesEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTSupplementalFont', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTTableBackgroundStyle', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTTablePartStyle', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextBlipBullet', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextBulletColorFollowText', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextBulletSizeFollowText', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextBulletTypefaceFollowText', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextUnderlineFillFollowText', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextUnderlineFillGroupWrapper', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextUnderlineLineFollowText', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTTileInfoProperties', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTTintEffect', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTVideoFile', - 'org.openxmlformats.schemas.drawingml.x2006.main.CTXYAdjustHandle', - 'org.openxmlformats.schemas.drawingml.x2006.main.STBlackWhiteMode', - 'org.openxmlformats.schemas.drawingml.x2006.main.STBlipCompression', - 'org.openxmlformats.schemas.drawingml.x2006.main.STFixedAngle', - 'org.openxmlformats.schemas.drawingml.x2006.main.STGuid', - 'org.openxmlformats.schemas.drawingml.x2006.main.STPanose', - 'org.openxmlformats.schemas.drawingml.x2006.main.STPathFillMode', - 'org.openxmlformats.schemas.drawingml.x2006.main.STRectAlignment', - 'org.openxmlformats.schemas.drawingml.x2006.main.STTextColumnCount', - 'org.openxmlformats.schemas.drawingml.x2006.main.STTextNonNegativePoint', - 'org.openxmlformats.schemas.drawingml.x2006.main.STTextTabAlignType', - 'org.openxmlformats.schemas.drawingml.x2006.main.STTileFlipMode', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTAdjustHandleListImpl$1AhPolarList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTAdjustHandleListImpl$1AhXYList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1BlipFillList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1GradFillList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1GrpFillList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1NoFillList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1PattFillList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1SolidFillList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaBiLevelList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaCeilingList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaFloorList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaInvList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaModFixList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaModList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaReplList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1BiLevelList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1BlurList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1ClrChangeList', - 
'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1ClrReplList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1DuotoneList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1FillOverlayList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1GraysclList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1HslList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1LumList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1TintList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTConnectionSiteListImpl$1CxnList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectStyleListImpl$1EffectStyleList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1BlipFillList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1GradFillList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1GrpFillList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1NoFillList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1PattFillList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1SolidFillList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFontCollectionImpl$1FontList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTGeomGuideListImpl$1GdList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTGradientStopListImpl$1GsList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1AlphaList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1AlphaModList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1AlphaOffList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1BlueList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1BlueModList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1BlueOffList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1CompList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GammaList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GrayList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GreenList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GreenModList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GreenOffList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1HueList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1HueModList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1HueOffList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1InvGammaList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1InvList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1LumList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1LumModList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1LumOffList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1RedList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1RedModList', - 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1RedOffList', - 
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1SatList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1SatModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1SatOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1ShadeList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1TintList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTLineStyleListImpl$1LnList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTOfficeArtExtensionListImpl$1ExtList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DCubicBezierToImpl$1PtList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1ArcToList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1CloseList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1CubicBezToList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1LnToList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1MoveToList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1QuadBezToList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DListImpl$1PathList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1AlphaList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1AlphaModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1AlphaOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1BlueList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1BlueModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1BlueOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1CompList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GammaList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GrayList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GreenList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GreenModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GreenOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1HueList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1HueModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1HueOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1InvGammaList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1InvList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1LumList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1LumModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1LumOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1RedList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1RedModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1RedOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1SatList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1SatModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1SatOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1ShadeList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1TintList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1AlphaList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1AlphaModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1AlphaOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1BlueList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1BlueModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1BlueOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1CompList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GammaList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GrayList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GreenList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GreenModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GreenOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1HueList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1HueModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1HueOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1InvGammaList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1InvList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1LumList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1LumModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1LumOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1RedList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1RedModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1RedOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1SatList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1SatModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1SatOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1ShadeList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1TintList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1AlphaList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1AlphaModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1AlphaOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1BlueList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1BlueModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1BlueOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1CompList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GammaList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GrayList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GreenList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GreenModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GreenOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1HueList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1HueModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1HueOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1InvGammaList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1InvList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1LumList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1LumModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1LumOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1RedList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1RedModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1RedOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1SatList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1SatModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1SatOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1ShadeList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1TintList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1AlphaList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1AlphaModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1AlphaOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1BlueList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1BlueModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1BlueOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1CompList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GammaList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GrayList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GreenList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GreenModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GreenOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1HueList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1HueModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1HueOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1InvGammaList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1InvList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1LumList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1LumModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1LumOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1RedList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1RedModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1RedOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1SatList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1SatModList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1SatOffList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1ShadeList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1TintList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableGridImpl$1GridColList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableImpl$1TrList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableRowImpl$1TcList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableStyleListImpl$1TblStyleList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextBodyImpl$1PList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextParagraphImpl$1BrList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextParagraphImpl$1FldList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextParagraphImpl$1RList',
- 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextTabStopListImpl$1TabList',
- 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.CTAbsoluteAnchor',
- 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTDrawingImpl$1AbsoluteAnchorList',
- 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTDrawingImpl$1OneCellAnchorList',
- 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTDrawingImpl$1TwoCellAnchorList',
- 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1CxnSpList',
- 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1GraphicFrameList',
- 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1GrpSpList',
- 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1PicList',
- 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1SpList',
- 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTEffectExtent',
- 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTPosH',
- 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTPosV',
- 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapNone',
- 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapSquare',
- 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapThrough',
- 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapTight',
- 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapTopBottom',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTArray',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTCf',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTEmpty',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTNull',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTVstream',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.STCy',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.STError',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.STVectorBaseType',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1BoolList',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1BstrList',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1CfList',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1ClsidList',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1CyList',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1DateList',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1ErrorList',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1FiletimeList',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1I1List',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1I2List',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1I4List',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1I8List',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1LpstrList',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1LpwstrList',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1R4List',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1R8List',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1Ui1List',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1Ui2List',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1Ui4List',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1Ui8List',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1VariantList',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2BoolList',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2BstrList',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2ClsidList',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2CyList',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2DateList',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2ErrorList',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2FiletimeList',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2I1List',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2I2List',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2I4List',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2I8List',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2LpstrList',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2LpwstrList',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2R4List',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2R8List',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2Ui1List',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2Ui2List',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2Ui4List',
- 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2Ui8List',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTAcc',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTBar',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTBorderBox',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTBox',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTD',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTEqArr',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTF',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTFunc',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTGroupChr',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTLimLow',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTLimUpp',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTM',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTMathPr',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTNary',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTOMath',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTOMathPara',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTPhant',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTR',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTRad',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTSPre',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTSSub',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTSSubSup',
- 'org.openxmlformats.schemas.officeDocument.x2006.math.CTSSup',
- 'org.openxmlformats.schemas.presentationml.x2006.main.CTControlList',
- 'org.openxmlformats.schemas.presentationml.x2006.main.CTCustomShowList',
- 'org.openxmlformats.schemas.presentationml.x2006.main.CTCustomerData',
- 'org.openxmlformats.schemas.presentationml.x2006.main.CTEmbeddedFontList',
- 'org.openxmlformats.schemas.presentationml.x2006.main.CTExtensionList',
- 'org.openxmlformats.schemas.presentationml.x2006.main.CTExtensionListModify',
- 'org.openxmlformats.schemas.presentationml.x2006.main.CTHandoutMasterIdList',
- 'org.openxmlformats.schemas.presentationml.x2006.main.CTHeaderFooter',
- 'org.openxmlformats.schemas.presentationml.x2006.main.CTKinsoku',
- 'org.openxmlformats.schemas.presentationml.x2006.main.CTModifyVerifier',
- 'org.openxmlformats.schemas.presentationml.x2006.main.CTPhotoAlbum',
- 'org.openxmlformats.schemas.presentationml.x2006.main.CTSlideLayoutIdList',
- 'org.openxmlformats.schemas.presentationml.x2006.main.CTSlideTiming',
- 'org.openxmlformats.schemas.presentationml.x2006.main.CTSlideTransition',
- 'org.openxmlformats.schemas.presentationml.x2006.main.CTSmartTags',
- 'org.openxmlformats.schemas.presentationml.x2006.main.STBookmarkIdSeed',
- 'org.openxmlformats.schemas.presentationml.x2006.main.STDirection',
- 'org.openxmlformats.schemas.presentationml.x2006.main.STIndex',
- 'org.openxmlformats.schemas.presentationml.x2006.main.STPlaceholderSize',
- 'org.openxmlformats.schemas.presentationml.x2006.main.STSlideSizeType',
- 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTCommentAuthorListImpl$1CmAuthorList',
- 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTCommentListImpl$1CmList',
- 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTCustomerDataListImpl$1CustDataList',
- 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTGroupShapeImpl$1GraphicFrameList',
- 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTGroupShapeImpl$1PicList',
- 'org.openxmlformats.schemas.schemaLibrary.x2006.main.CTSchemaLibrary',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTAutoSortScope',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTBoolean',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCacheHierarchies',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCalculatedItems',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCalculatedMembers',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCellStyles',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCellWatches',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTChartFormats',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTChartsheetPr',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTChartsheetProtection',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTChartsheetViews',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTColHierarchiesUsage',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTColItems',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTColors',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTConditionalFormats',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTConsolidation',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTControls',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCsPageSetup',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCustomChartsheetViews',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCustomProperties',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCustomSheetViews',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCustomWorkbookViews',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDataBinding',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDataConsolidate',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDateTime',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDdeLink',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDimensions',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTError',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTExtensionList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTExternalSheetDataSet',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFieldGroup',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFileRecoveryPr',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFileSharing',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFileVersion',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFilterColumn',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFormats',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFunctionGroups',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTGradientFill',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTIgnoredErrors',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTMeasureDimensionMaps',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTMeasureGroups',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTMissing',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTNumber',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTOleLink',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTOleObjects',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTOleSize',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPCDKPIs',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPhoneticRun',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPivotFilters',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPivotHierarchies',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPivotSelection',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTProtectedRanges',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTRecord',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTRowHierarchiesUsage',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTRowItems',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTScenarios',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSheetBackgroundPicture',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSmartTagPr',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSmartTagTypes',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSmartTags',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSortState',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTString',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTTableFormula',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTTableStyleInfo',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTTableStyles',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTTupleCache',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTWebPublishItems',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTWebPublishObjects',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTWebPublishing',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTX',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STCellSpans',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STDataValidationImeMode',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STFieldSortType',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STGuid',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STObjects',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STPhoneticAlignment',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STPhoneticType',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STPrintError',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STRefMode',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STSheetViewType',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STShowDataAs',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STTableType',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STTimePeriod',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STTotalsRowFunction',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STUpdateLinks',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STVisibility',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTAuthorsImpl$1AuthorList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTAuthorsImpl$2AuthorList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTAutoFilterImpl$1FilterColumnList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTBookViewsImpl$1WorkbookViewList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTBordersImpl$1BorderList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCacheFieldImpl$1MpMapList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCacheFieldsImpl$1CacheFieldList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCalcChainImpl$1CList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCellStyleXfsImpl$1XfList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCellXfsImpl$1XfList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCfRuleImpl$1FormulaList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCfRuleImpl$2FormulaList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColFieldsImpl$1FieldList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColorScaleImpl$1CfvoList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColorScaleImpl$1ColorList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColsImpl$1ColList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCommentListImpl$1CommentList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTConditionalFormattingImpl$1CfRuleList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTDataBarImpl$1CfvoList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTDataValidationsImpl$1DataValidationList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTDxfsImpl$1DxfList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTExternalDefinedNamesImpl$1DefinedNameList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTExternalReferencesImpl$1ExternalReferenceList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTExternalSheetNamesImpl$1SheetNameList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFillsImpl$1FillList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1BList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1CharsetList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1ColorList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1CondenseList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1ExtendList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1FamilyList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1IList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1NameList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1OutlineList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1SchemeList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1ShadowList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1StrikeList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1SzList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1UList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1VertAlignList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontsImpl$1FontList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTHyperlinksImpl$1HyperlinkList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTIconSetImpl$1CfvoList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTItemsImpl$1ItemList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTMapInfoImpl$1MapList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTMapInfoImpl$1SchemaList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTMergeCellsImpl$1MergeCellList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTNumFmtsImpl$1NumFmtList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPageBreakImpl$1BrkList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPageFieldsImpl$1PageFieldList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPivotCacheRecordsImpl$1RList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPivotCachesImpl$1PivotCacheList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPivotFieldsImpl$1PivotFieldList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1BList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1CharsetList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1ColorList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1CondenseList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1ExtendList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1FamilyList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1IList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1OutlineList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1RFontList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1SchemeList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1ShadowList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1StrikeList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1SzList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1UList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1VertAlignList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRowFieldsImpl$1FieldList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRowImpl$1CList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRstImpl$1RList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRstImpl$1RPhList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1BList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1DList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1EList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1MList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1NList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1SList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetDataImpl$1RowList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetViewImpl$1PivotSelectionList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetViewImpl$1SelectionList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetViewsImpl$1SheetViewList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSingleXmlCellsImpl$1SingleXmlCellList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSstImpl$1SiList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTTableColumnsImpl$1TableColumnList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTTablePartsImpl$1TablePartList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTWorkbookImpl$1FileRecoveryPrList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTWorksheetImpl$1ColsList',
- 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTWorksheetImpl$1ConditionalFormattingList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTAltChunk',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTAttr',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTBackground',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCaptions',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCellMergeTrackChange',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCharacterSpacing',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCnf',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTColorSchemeMapping',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTColumns',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCompat',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTControl',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCustomXmlBlock',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCustomXmlCell',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCustomXmlRow',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCustomXmlRun',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDataBinding',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDocGrid',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDocRsids',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDocType',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDocVars',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEastAsianLayout',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEdnDocProps',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEdnProps',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEm',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFDDList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFHelpText',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFName',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFStatusText',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFTextInput',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFitText',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFramePr',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFtnDocProps',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFtnProps',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTHighlight',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTKinsoku',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLevelSuffix',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLineNumber',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLock',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLongHexNumber',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLvlLegacy',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTMacroName',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTMailMerge',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTMultiLevelType',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTNumPicBullet',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPPrChange',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPageBorders',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPageMar',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPageNumber',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPageSz',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPaperSource',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTParaRPrChange',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPerm',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPermStart',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPlaceholder',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTProof',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTRPrChange',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTReadingModeInkLockDown',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTRuby',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSaveThroughXslt',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtComboBox',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtDate',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtDropDownList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtRow',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtText',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSectPrChange',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSectType',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTShapeDefaults',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTShortHexNumber',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSignedTwipsMeasure',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSmartTagType',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblGridChange',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblLayoutType',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblOverlap',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblPPr',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblPrChange',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblPrExChange',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblStylePr',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTcMar',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTcPrChange',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextDirection',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextEffect',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextScale',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextboxTightWrap',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTrPrChange',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTrackChangeNumbering',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTrackChangesView',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTwipsMeasure',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTView',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTWriteProtection',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTWritingStyle',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STDateTime',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STDisplacedByCustomXml',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STHeightRule',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STHint',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STPTabAlignment',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STPTabLeader',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STPTabRelativeTo',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STProofErr',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STShortHexNumber',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STThemeColor',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STUcharHexNumber',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STZoom',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTAbstractNumImpl$1LvlList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1AltChunkList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1BookmarkEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1BookmarkStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CommentRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CommentRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlDelRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlDelRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlInsRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlInsRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlMoveFromRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlMoveFromRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlMoveToRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlMoveToRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1DelList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1InsList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveFromList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveFromRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveFromRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveToList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveToRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveToRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1OMathList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1OMathParaList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1PList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1PermEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1PermStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1ProofErrList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1SdtList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1TblList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1AltChunkList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1BookmarkEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1BookmarkStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CommentRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CommentRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlDelRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlDelRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlInsRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlInsRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlMoveFromRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlMoveFromRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlMoveToRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlMoveToRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1DelList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1InsList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveFromList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveFromRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveFromRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveToList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveToRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveToRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1OMathList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1OMathParaList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1PList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1PermEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1PermStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1ProofErrList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1SdtList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1TblList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentsImpl$1CommentList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTDrawingImpl$1AnchorList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTDrawingImpl$1InlineList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTEndnotesImpl$1EndnoteList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1CalcOnExitList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1DdListList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1EnabledList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1EntryMacroList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1ExitMacroList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1HelpTextList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1NameList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1StatusTextList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1TextInputList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFootnotesImpl$1FootnoteList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1AltChunkList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1BookmarkEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1BookmarkStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CommentRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CommentRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlDelRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlDelRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlInsRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlInsRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlMoveFromRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlMoveFromRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlMoveToRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlMoveToRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1DelList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1InsList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveFromList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveFromRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveFromRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveToList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveToRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveToRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1OMathList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1OMathParaList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1PList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1PermEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1PermStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1ProofErrList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1SdtList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1TblList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1AltChunkList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1BookmarkEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1BookmarkStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CommentRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CommentRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlDelRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlDelRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlInsRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlInsRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlMoveFromRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlMoveFromRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlMoveToRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlMoveToRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1DelList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1InsList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveFromList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveFromRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveFromRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveToList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveToRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveToRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1OMathList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1OMathParaList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1PList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1PermEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1PermStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1ProofErrList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1SdtList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1TblList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1BookmarkEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1BookmarkStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CommentRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CommentRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlDelRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlDelRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlInsRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlInsRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlMoveFromRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlMoveFromRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlMoveToRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlMoveToRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1DelList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1FldSimpleList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1HyperlinkList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1InsList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveFromList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveFromRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveFromRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveToList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveToRangeEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveToRangeStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1OMathList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1OMathParaList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1PermEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1PermStartList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1ProofErrList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1RList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1SdtList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1SmartTagList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1SubDocList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTLatentStylesImpl$1LsdExceptionList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumImpl$1LvlOverrideList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumberingImpl$1AbstractNumList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumberingImpl$1NumList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumberingImpl$1NumPicBulletList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1BookmarkEndList',
- 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1BookmarkStartList',
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1FldSimpleList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1HyperlinkList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1RList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1SmartTagList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1SubDocList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1AnnotationRefList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1BrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1CommentReferenceList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1ContinuationSeparatorList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1CrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DayLongList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DayShortList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DelInstrTextList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DelTextList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DrawingList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1EndnoteRefList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1EndnoteReferenceList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1FldCharList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1FootnoteRefList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1FootnoteReferenceList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1InstrTextList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1LastRenderedPageBreakList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1MonthLongList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1MonthShortList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1NoBreakHyphenList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1ObjectList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1PgNumList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1PictList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1PtabList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1RubyList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1SeparatorList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1SoftHyphenList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1SymList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1TList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1TabList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1YearLongList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1YearShortList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1InsList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1TcList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1AccList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BarList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BorderBoxList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BoxList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1DList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1EqArrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1FList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1FuncList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1GroupChrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1LimLowList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1LimUppList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1NaryList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1PhantList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1R2List', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1RList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1RadList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SPreList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SSubList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SSubSupList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SSupList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SmartTagList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlInsRangeStartList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1PList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1TblList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlMoveToRangeEndList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1TcList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1FldSimpleList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1HyperlinkList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveFromList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1RList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1SmartTagList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1SubDocList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtEndPrImpl$1RPrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1AliasList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1BibliographyList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1CitationList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1ComboBoxList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DataBindingList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DateList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DocPartListList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DocPartObjList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DropDownListList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1EquationList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1GroupList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1IdList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1LockList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1PictureList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1PlaceholderList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1RPrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1RichTextList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1ShowingPlcHdrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1TagList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1TemporaryList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1TextList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSectPrImpl$1FooterReferenceList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSectPrImpl$1HeaderReferenceList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSettingsImpl$1ActiveWritingStyleList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSettingsImpl$1AttachedSchemaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSettingsImpl$1SmartTagTypeList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1FldSimpleList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1HyperlinkList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1RList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1SdtList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1SmartTagList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1SubDocList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagPrImpl$1AttrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1FldSimpleList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1HyperlinkList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1RList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1SmartTagList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1SubDocList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTStyleImpl$1TblStylePrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTStylesImpl$1StyleList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTabsImpl$1TabList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblGridBaseImpl$1GridColList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1TrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1AltChunkList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CommentRangeStartList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1PList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1TblList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1CantSplitList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1CnfStyleList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1DivIdList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1GridAfterList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1GridBeforeList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1HiddenList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1JcList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1TblCellSpacingList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1TblHeaderList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1TrHeightList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1WAfterList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1WBeforeList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1AltChunkList', - 
'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1BookmarkEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1BookmarkStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CommentRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CommentRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlDelRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlDelRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlInsRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlInsRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlMoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlMoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlMoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlMoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1DelList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1InsList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveFromList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveFromRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveFromRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveToList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveToRangeEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveToRangeStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1OMathList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1OMathParaList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1PList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1PermEndList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1PermStartList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1ProofErrList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1SdtList', - 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1TblList', - 'org.osgi.framework.BundleActivator', - 'org.osgi.framework.BundleContext', - 'org.osgi.framework.ServiceReference', - 'org.osgi.framework.ServiceRegistration', - 'org.osgi.util.tracker.ServiceTracker', - 'org.osgi.util.tracker.ServiceTrackerCustomizer', - 'org.sqlite.SQLiteConfig', - 'org.tukaani.xz.ARMOptions', - 'org.tukaani.xz.ARMThumbOptions', - 'org.tukaani.xz.DeltaOptions', - 'org.tukaani.xz.FilterOptions', - 'org.tukaani.xz.FinishableWrapperOutputStream', - 'org.tukaani.xz.IA64Options', - 'org.tukaani.xz.LZMA2InputStream', - 'org.tukaani.xz.LZMA2Options', - 
'org.tukaani.xz.LZMAInputStream', - 'org.tukaani.xz.PowerPCOptions', - 'org.tukaani.xz.SPARCOptions', - 'org.tukaani.xz.SingleXZInputStream', - 'org.tukaani.xz.UnsupportedOptionsException', - 'org.tukaani.xz.X86Options', - 'org.tukaani.xz.XZ', - 'org.tukaani.xz.XZInputStream', - 'org.tukaani.xz.XZOutputStream', - 'org.w3.x2000.x09.xmldsig.KeyInfoType', - 'org.w3.x2000.x09.xmldsig.SignatureMethodType', - 'org.w3.x2000.x09.xmldsig.SignatureValueType', - 'org.w3.x2000.x09.xmldsig.TransformsType', - 'org.w3.x2000.x09.xmldsig.impl.SignatureTypeImpl$1ObjectList', - 'org.w3.x2000.x09.xmldsig.impl.SignedInfoTypeImpl$1ReferenceList', - 'org.w3.x2000.x09.xmldsig.impl.TransformTypeImpl$1XPathList', - 'org.w3.x2000.x09.xmldsig.impl.TransformTypeImpl$2XPathList', - 'schemasMicrosoftComOfficeExcel.STCF', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1Accel2List', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AccelList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AnchorList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AutoFillList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AutoLineList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AutoPictList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1AutoScaleList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1CFList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1CameraList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1CancelList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1CheckedList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ColHiddenList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ColoredList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ColumnList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DDEList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DefaultList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DefaultSizeList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DisabledList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DismissList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DropLinesList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DropStyleList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1DxList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FirstButtonList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaGroupList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaLinkList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaMacroList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaPictList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaRangeList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1FmlaTxbxList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1HelpList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1HorizList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1IncList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1JustLastXList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1LCTList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ListItemList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1LockTextList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1LockedList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MapOCXList', - 
'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MaxList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MinList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MoveWithCellsList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MultiLineList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1MultiSelList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1NoThreeD2List', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1NoThreeDList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1PageList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1PrintObjectList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1RecalcAlwaysList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1RowHiddenList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1RowList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ScriptExtendedList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ScriptLanguageList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ScriptLocationList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ScriptTextList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1SecretEditList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1SelList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1SelTypeList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1SizeWithCellsList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1TextHAlignList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1TextVAlignList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1UIObjList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1VScrollList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1VTEditList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ValList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1ValidIdsList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1VisibleList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$1WidthMinList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2Accel2List', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AccelList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AnchorList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AutoFillList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AutoLineList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AutoPictList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2AutoScaleList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2CFList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2CameraList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2CancelList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2CheckedList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ColHiddenList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ColoredList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ColumnList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DDEList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DefaultList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DefaultSizeList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DisabledList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DismissList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DropLinesList', - 
'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DropStyleList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2DxList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FirstButtonList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaGroupList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaLinkList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaMacroList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaPictList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaRangeList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2FmlaTxbxList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2HelpList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2HorizList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2IncList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2JustLastXList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2LCTList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ListItemList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2LockTextList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2LockedList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MapOCXList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MaxList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MinList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MoveWithCellsList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MultiLineList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2MultiSelList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2NoThreeD2List', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2NoThreeDList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2PageList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2PrintObjectList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2RecalcAlwaysList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2RowHiddenList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2RowList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ScriptExtendedList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ScriptLanguageList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ScriptLocationList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ScriptTextList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2SecretEditList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2SelList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2SelTypeList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2SizeWithCellsList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2TextHAlignList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2TextVAlignList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2UIObjList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2VScrollList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2VTEditList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ValList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2ValidIdsList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2VisibleList', - 'schemasMicrosoftComOfficeExcel.impl.CTClientDataImpl$2WidthMinList', - 'schemasMicrosoftComOfficeOffice.CTCallout', - 'schemasMicrosoftComOfficeOffice.CTClipPath', - 'schemasMicrosoftComOfficeOffice.CTComplex', - 
'schemasMicrosoftComOfficeOffice.CTDiagram', - 'schemasMicrosoftComOfficeOffice.CTExtrusion', - 'schemasMicrosoftComOfficeOffice.CTFill', - 'schemasMicrosoftComOfficeOffice.CTInk', - 'schemasMicrosoftComOfficeOffice.CTRegroupTable', - 'schemasMicrosoftComOfficeOffice.CTRules', - 'schemasMicrosoftComOfficeOffice.CTSignatureLine', - 'schemasMicrosoftComOfficeOffice.CTSkew', - 'schemasMicrosoftComOfficeOffice.CTStrokeChild', - 'schemasMicrosoftComOfficeOffice.STBWMode', - 'schemasMicrosoftComOfficeOffice.STConnectorType', - 'schemasMicrosoftComOfficeOffice.STHrAlign', - 'schemasMicrosoftComOfficeOffice.STRelationshipId', - 'schemasMicrosoftComOfficeOffice.STTrueFalse', - 'schemasMicrosoftComOfficeOffice.STTrueFalseBlank', - 'schemasMicrosoftComOfficePowerpoint.CTEmpty', - 'schemasMicrosoftComOfficePowerpoint.CTRel', - 'schemasMicrosoftComOfficeWord.CTAnchorLock', - 'schemasMicrosoftComOfficeWord.CTBorder', - 'schemasMicrosoftComOfficeWord.CTWrap', - 'schemasMicrosoftComVml.CTArc', - 'schemasMicrosoftComVml.CTCurve', - 'schemasMicrosoftComVml.CTImage', - 'schemasMicrosoftComVml.CTImageData', - 'schemasMicrosoftComVml.CTLine', - 'schemasMicrosoftComVml.CTOval', - 'schemasMicrosoftComVml.CTPolyLine', - 'schemasMicrosoftComVml.CTRect', - 'schemasMicrosoftComVml.CTRoundRect', - 'schemasMicrosoftComVml.STEditAs', - 'schemasMicrosoftComVml.STFillMethod', - 'schemasMicrosoftComVml.STFillType', - 'schemasMicrosoftComVml.STImageAspect', - 'schemasMicrosoftComVml.STShadowType', - 'schemasMicrosoftComVml.STStrokeArrowLength', - 'schemasMicrosoftComVml.STStrokeArrowType', - 'schemasMicrosoftComVml.STStrokeArrowWidth', - 'schemasMicrosoftComVml.STStrokeEndCap', - 'schemasMicrosoftComVml.STStrokeLineStyle', - 'schemasMicrosoftComVml.STTrueFalseBlank', - 'schemasMicrosoftComVml.impl.CTFormulasImpl$1FList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1AnchorlockList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1ArcList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1BorderbottomList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1BorderleftList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1BorderrightList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1BordertopList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1CalloutList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1ClientDataList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1ClippathList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1CurveList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1DiagramList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1ExtrusionList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1FillList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1FormulasList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1GroupList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1HandlesList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1ImageList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1ImagedataList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1LineList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1LockList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1OvalList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1PathList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1PolylineList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1RectList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1RoundrectList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1ShadowList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1ShapeList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1ShapetypeList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1SignaturelineList', - 
'schemasMicrosoftComVml.impl.CTGroupImpl$1SkewList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1StrokeList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1TextboxList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1TextdataList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1TextpathList', - 'schemasMicrosoftComVml.impl.CTGroupImpl$1WrapList', - 'schemasMicrosoftComVml.impl.CTHandlesImpl$1HList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1AnchorlockList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1BorderbottomList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1BorderleftList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1BorderrightList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1BordertopList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1CalloutList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1ClippathList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1ExtrusionList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1FillList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1FormulasList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1HandlesList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1ImagedataList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1InkList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1IscommentList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1LockList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1PathList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1ShadowList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1SignaturelineList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1SkewList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1StrokeList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1TextboxList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1TextdataList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1TextpathList', - 'schemasMicrosoftComVml.impl.CTShapeImpl$1WrapList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1AnchorlockList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1BorderbottomList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1BorderleftList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1BorderrightList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1BordertopList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1CalloutList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1ClientDataList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1ClippathList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1ExtrusionList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1FillList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1FormulasList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1HandlesList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1ImagedataList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1LockList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1PathList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1ShadowList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1SignaturelineList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1SkewList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1StrokeList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1TextboxList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1TextdataList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1TextpathList', - 'schemasMicrosoftComVml.impl.CTShapetypeImpl$1WrapList', - 'ucar.ma2.DataType', - 'ucar.nc2.Attribute', - 'ucar.nc2.Dimension', - 'ucar.nc2.Group', - 'ucar.nc2.NetcdfFile', - 'ucar.nc2.Variable', - 'ucar.nc2.dataset.NetcdfDataset', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTArea3DChart', + 'org.openxmlformats.schemas.drawingml.x2006.chart.CTAreaChart', + 
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTAxisUnit',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTBar3DChart',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTBarChart',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTBubbleChart',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTChartLines',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDLbls',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDPt',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDTable',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDateAx',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDispBlanksAs',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDispUnits',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTDoughnutChart',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTErrBars',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTExtensionList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTExternalData',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTFirstSliceAng',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTGrouping',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTLblAlgn',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTLblOffset',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTLegendEntry',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTLine3DChart',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTMarkerSize',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTMultiLvlStrRef',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTOfPieChart',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTPie3DChart',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTPivotFmts',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTPivotSource',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTProtection',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTRadarChart',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTRelId',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSerAx',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSkip',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTStockChart',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTStyle',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSurface',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSurface3DChart',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTSurfaceChart',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTTextLanguageID',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTTrendline',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTUpDownBars',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.CTView3D',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.STPageSetupOrientation',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLegendImpl$1LegendEntryList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineChartImpl$1AxIdList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineChartImpl$1SerList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineSerImpl$1DPtList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTLineSerImpl$1TrendlineList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTNumDataImpl$1PtList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPieChartImpl$1SerList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPieSerImpl$1DPtList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Area3DChartList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1AreaChartList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Bar3DChartList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1BarChartList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1BubbleChartList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1CatAxList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1DateAxList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1DoughnutChartList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Line3DChartList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1LineChartList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1OfPieChartList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Pie3DChartList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1PieChartList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1RadarChartList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1ScatterChartList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1SerAxList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1StockChartList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1Surface3DChartList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1SurfaceChartList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTPlotAreaImpl$1ValAxList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterChartImpl$1AxIdList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterChartImpl$1SerList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterSerImpl$1DPtList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterSerImpl$1ErrBarsList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTScatterSerImpl$1TrendlineList',
+                 'org.openxmlformats.schemas.drawingml.x2006.chart.impl.CTStrDataImpl$1PtList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaBiLevelEffect',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaCeilingEffect',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaFloorEffect',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaInverseEffect',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaModulateEffect',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTAlphaReplaceEffect',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTAngle',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTAudioCD',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTAudioFile',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTBiLevelEffect',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTBlurEffect',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTCell3D',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTColorChangeEffect',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTColorReplaceEffect',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTColorSchemeList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTComplementTransform',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTConnectionSite',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTConnectorLocking',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTCustomColorList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTDashStopList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTDuotoneEffect',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTEffectContainer',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTEmbeddedWAVAudioFile',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTFillOverlayEffect',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTFlatText',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTGammaTransform',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTGlowEffect',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTGrayscaleEffect',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTGrayscaleTransform',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTGroupFillProperties',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTGroupLocking',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTHSLEffect',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTInnerShadowEffect',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTInverseGammaTransform',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTInverseTransform',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTLineJoinBevel',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTLuminanceEffect',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTObjectStyleDefaults',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTPath2DArcTo',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTPatternFillProperties',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTPolarAdjustHandle',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTPositiveFixedAngle',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTPresetShadowEffect',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTPresetTextShape',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTQuickTimeFile',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTReflectionEffect',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTScene3D',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTShape3D',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTShapeLocking',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTSoftEdgesEffect',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTSupplementalFont',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTTableBackgroundStyle',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTTablePartStyle',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextBlipBullet',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextBulletColorFollowText',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextBulletSizeFollowText',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextBulletTypefaceFollowText',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextUnderlineFillFollowText',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextUnderlineFillGroupWrapper',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTTextUnderlineLineFollowText',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTTileInfoProperties',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTTintEffect',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTVideoFile',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.CTXYAdjustHandle',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.STBlackWhiteMode',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.STBlipCompression',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.STFixedAngle',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.STGuid',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.STPanose',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.STPathFillMode',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.STRectAlignment',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.STTextColumnCount',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.STTextNonNegativePoint',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.STTextTabAlignType',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.STTileFlipMode',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTAdjustHandleListImpl$1AhPolarList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTAdjustHandleListImpl$1AhXYList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1BlipFillList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1GradFillList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1GrpFillList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1NoFillList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1PattFillList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBackgroundFillStyleListImpl$1SolidFillList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaBiLevelList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaCeilingList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaFloorList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaInvList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaModFixList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1AlphaReplList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1BiLevelList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1BlurList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1ClrChangeList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1ClrReplList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1DuotoneList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1FillOverlayList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1GraysclList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1HslList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1LumList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTBlipImpl$1TintList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTConnectionSiteListImpl$1CxnList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTEffectStyleListImpl$1EffectStyleList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1BlipFillList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1GradFillList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1GrpFillList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1NoFillList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1PattFillList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFillStyleListImpl$1SolidFillList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTFontCollectionImpl$1FontList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTGeomGuideListImpl$1GdList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTGradientStopListImpl$1GsList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1AlphaList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1AlphaModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1AlphaOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1BlueList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1BlueModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1BlueOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1CompList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GammaList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GrayList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GreenList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GreenModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1GreenOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1HueList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1HueModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1HueOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1InvGammaList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1InvList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1LumList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1LumModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1LumOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1RedList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1RedModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1RedOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1SatList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1SatModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1SatOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1ShadeList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTHslColorImpl$1TintList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTLineStyleListImpl$1LnList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTOfficeArtExtensionListImpl$1ExtList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DCubicBezierToImpl$1PtList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1ArcToList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1CloseList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1CubicBezToList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1LnToList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1MoveToList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DImpl$1QuadBezToList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPath2DListImpl$1PathList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1AlphaList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1AlphaModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1AlphaOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1BlueList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1BlueModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1BlueOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1CompList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GammaList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GrayList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GreenList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GreenModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1GreenOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1HueList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1HueModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1HueOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1InvGammaList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1InvList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1LumList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1LumModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1LumOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1RedList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1RedModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1RedOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1SatList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1SatModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1SatOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1ShadeList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTPresetColorImpl$1TintList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1AlphaList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1AlphaModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1AlphaOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1BlueList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1BlueModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1BlueOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1CompList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GammaList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GrayList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GreenList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GreenModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1GreenOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1HueList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1HueModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1HueOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1InvGammaList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1InvList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1LumList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1LumModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1LumOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1RedList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1RedModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1RedOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1SatList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1SatModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1SatOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1ShadeList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSRgbColorImpl$1TintList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1AlphaList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1AlphaModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1AlphaOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1BlueList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1BlueModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1BlueOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1CompList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GammaList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GrayList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GreenList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GreenModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1GreenOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1HueList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1HueModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1HueOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1InvGammaList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1InvList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1LumList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1LumModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1LumOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1RedList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1RedModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1RedOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1SatList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1SatModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1SatOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1ShadeList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSchemeColorImpl$1TintList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1AlphaList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1AlphaModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1AlphaOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1BlueList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1BlueModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1BlueOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1CompList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GammaList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GrayList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GreenList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GreenModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1GreenOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1HueList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1HueModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1HueOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1InvGammaList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1InvList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1LumList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1LumModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1LumOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1RedList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1RedModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1RedOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1SatList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1SatModList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1SatOffList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1ShadeList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTSystemColorImpl$1TintList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableGridImpl$1GridColList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableImpl$1TrList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableRowImpl$1TcList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTableStyleListImpl$1TblStyleList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextBodyImpl$1PList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextParagraphImpl$1BrList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextParagraphImpl$1FldList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextParagraphImpl$1RList',
+                 'org.openxmlformats.schemas.drawingml.x2006.main.impl.CTTextTabStopListImpl$1TabList',
+                 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.CTAbsoluteAnchor',
+                 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTDrawingImpl$1AbsoluteAnchorList',
+                 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTDrawingImpl$1OneCellAnchorList',
+                 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTDrawingImpl$1TwoCellAnchorList',
+                 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1CxnSpList',
+                 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1GraphicFrameList',
+                 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1GrpSpList',
+                 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1PicList',
+                 'org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.impl.CTGroupShapeImpl$1SpList',
+                 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTEffectExtent',
+                 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTPosH',
+                 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTPosV',
+                 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapNone',
+                 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapSquare',
+                 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapThrough',
+                 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapTight',
+                 'org.openxmlformats.schemas.drawingml.x2006.wordprocessingDrawing.CTWrapTopBottom',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTArray',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTCf',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTEmpty',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTNull',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.CTVstream',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.STCy',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.STError',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.STVectorBaseType',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1BoolList',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1BstrList',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1CfList',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1ClsidList',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1CyList',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1DateList',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1ErrorList',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1FiletimeList',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1I1List',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1I2List',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1I4List',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1I8List',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1LpstrList',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1LpwstrList',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1R4List',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1R8List',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1Ui1List',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1Ui2List',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1Ui4List',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1Ui8List',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$1VariantList',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2BoolList',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2BstrList',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2ClsidList',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2CyList',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2DateList',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2ErrorList',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2FiletimeList',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2I1List',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2I2List',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2I4List',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2I8List',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2LpstrList',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2LpwstrList',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2R4List',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2R8List',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2Ui1List',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2Ui2List',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2Ui4List',
+                 'org.openxmlformats.schemas.officeDocument.x2006.docPropsVTypes.impl.CTVectorImpl$2Ui8List',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTAcc',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTBar',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTBorderBox',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTBox',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTD',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTEqArr',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTF',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTFunc',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTGroupChr',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTLimLow',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTLimUpp',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTM',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTMathPr',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTNary',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTOMath',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTOMathPara',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTPhant',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTR',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTRad',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTSPre',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTSSub',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTSSubSup',
+                 'org.openxmlformats.schemas.officeDocument.x2006.math.CTSSup',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.CTControlList',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.CTCustomShowList',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.CTCustomerData',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.CTEmbeddedFontList',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.CTExtensionList',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.CTExtensionListModify',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.CTHandoutMasterIdList',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.CTHeaderFooter',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.CTKinsoku',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.CTModifyVerifier',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.CTPhotoAlbum',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.CTSlideLayoutIdList',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.CTSlideTiming',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.CTSlideTransition',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.CTSmartTags',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.STBookmarkIdSeed',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.STDirection',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.STIndex',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.STPlaceholderSize',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.STSlideSizeType',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTCommentAuthorListImpl$1CmAuthorList',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTCommentListImpl$1CmList',
+                 'org.openxmlformats.schemas.presentationml.x2006.main.impl.CTCustomerDataListImpl$1CustDataList',
+                 'org.openxmlformats.schemas.schemaLibrary.x2006.main.CTSchemaLibrary',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTAutoSortScope',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTBoolean',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCacheHierarchies',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCalculatedItems',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCalculatedMembers',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCellStyles',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCellWatches',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTChartFormats',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTChartsheetPr',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTChartsheetProtection',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTChartsheetViews',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTColHierarchiesUsage',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTColItems',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTColors',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTConditionalFormats',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTConsolidation',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTControls',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCsPageSetup',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCustomChartsheetViews',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCustomProperties',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCustomSheetViews',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTCustomWorkbookViews',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDataBinding',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDataConsolidate',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDateTime',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDdeLink',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTDimensions',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTError',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTExtensionList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTExternalSheetDataSet',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFieldGroup',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFileRecoveryPr',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFileSharing',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFileVersion',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFilterColumn',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFormats',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTFunctionGroups',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTGradientFill',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTMeasureDimensionMaps',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTMeasureGroups',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTMissing',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTNumber',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTOleLink',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTOleObjects',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTOleSize',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPCDKPIs',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPhoneticRun',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPivotFilters',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPivotHierarchies',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTPivotSelection',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTProtectedRanges',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTRecord',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTRowHierarchiesUsage',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTRowItems',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTScenarios',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSheetBackgroundPicture',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSmartTagPr',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSmartTagTypes',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSmartTags',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTSortState',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTString',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTTableFormula',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTTableStyles',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTTupleCache',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTWebPublishItems',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTWebPublishObjects',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTWebPublishing',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.CTX',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STCellSpans',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STDataValidationImeMode',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STFieldSortType',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STGuid',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STObjects',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STPhoneticAlignment',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STPhoneticType',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STPrintError',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STRefMode',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STSheetViewType',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STShowDataAs',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STTableType',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STTimePeriod',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STTotalsRowFunction',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STUpdateLinks',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.STVisibility',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTAuthorsImpl$1AuthorList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTAuthorsImpl$2AuthorList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTAutoFilterImpl$1FilterColumnList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTBookViewsImpl$1WorkbookViewList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTBordersImpl$1BorderList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCacheFieldImpl$1MpMapList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCacheFieldsImpl$1CacheFieldList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCellStyleXfsImpl$1XfList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCellXfsImpl$1XfList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCfRuleImpl$1FormulaList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCfRuleImpl$2FormulaList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColFieldsImpl$1FieldList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColorScaleImpl$1CfvoList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTColorScaleImpl$1ColorList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTCommentListImpl$1CommentList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTConditionalFormattingImpl$1CfRuleList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTDataBarImpl$1CfvoList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTDataValidationsImpl$1DataValidationList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTDxfsImpl$1DxfList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTExternalDefinedNamesImpl$1DefinedNameList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTExternalReferencesImpl$1ExternalReferenceList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTExternalSheetNamesImpl$1SheetNameList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFillsImpl$1FillList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1BList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1CharsetList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1ColorList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1CondenseList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1ExtendList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1FamilyList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1IList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1NameList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1OutlineList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1SchemeList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1ShadowList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1StrikeList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1SzList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1UList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontImpl$1VertAlignList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTFontsImpl$1FontList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTHyperlinksImpl$1HyperlinkList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTIconSetImpl$1CfvoList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTItemsImpl$1ItemList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTMapInfoImpl$1MapList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTMapInfoImpl$1SchemaList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTMergeCellsImpl$1MergeCellList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTNumFmtsImpl$1NumFmtList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPageBreakImpl$1BrkList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPageFieldsImpl$1PageFieldList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPivotCacheRecordsImpl$1RList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPivotCachesImpl$1PivotCacheList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTPivotFieldsImpl$1PivotFieldList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1BList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1CharsetList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1ColorList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1CondenseList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1ExtendList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1FamilyList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1IList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1OutlineList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1RFontList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1SchemeList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1ShadowList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1StrikeList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1SzList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1UList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRPrEltImpl$1VertAlignList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRowFieldsImpl$1FieldList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRowImpl$1CList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRstImpl$1RList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTRstImpl$1RPhList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1BList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1DList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1EList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1MList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1NList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSharedItemsImpl$1SList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetDataImpl$1RowList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetViewImpl$1PivotSelectionList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetViewImpl$1SelectionList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSheetViewsImpl$1SheetViewList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSingleXmlCellsImpl$1SingleXmlCellList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTSstImpl$1SiList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTTableColumnsImpl$1TableColumnList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTTablePartsImpl$1TablePartList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTWorkbookImpl$1FileRecoveryPrList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTWorksheetImpl$1ColsList',
+                 'org.openxmlformats.schemas.spreadsheetml.x2006.main.impl.CTWorksheetImpl$1ConditionalFormattingList',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTAltChunk',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTAttr',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTBackground',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCaptions',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCellMergeTrackChange',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCharacterSpacing',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCnf',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTColorSchemeMapping',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTColumns',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCompat',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTControl',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCustomXmlBlock',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCustomXmlCell',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCustomXmlRow',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTCustomXmlRun',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDataBinding',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDocGrid',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDocRsids',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDocType',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTDocVars',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEastAsianLayout',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEdnDocProps',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEdnProps',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTEm',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFDDList',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFHelpText',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFName',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFStatusText',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFFTextInput',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFitText',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFramePr',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFtnDocProps',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTFtnProps',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTKinsoku',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLevelSuffix',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLineNumber',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLock',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLongHexNumber',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTLvlLegacy',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTMacroName',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTMailMerge',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTMultiLevelType',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTNumPicBullet',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPPrChange',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPageBorders',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPageMar',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPageNumber',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPageSz',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPaperSource',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTParaRPrChange',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPerm',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPermStart',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTPlaceholder',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTProof',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTRPrChange',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTReadingModeInkLockDown',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTRuby',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSaveThroughXslt',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtComboBox',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtDate',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtDropDownList',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtRow',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSdtText',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSectPrChange',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSectType',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTShapeDefaults',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTShortHexNumber',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSignedTwipsMeasure',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTSmartTagType',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblGridChange',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblLayoutType',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblOverlap',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblPPr',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblPrChange',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblPrExChange',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTblStylePr',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTcMar',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTcPrChange',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextDirection',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextEffect',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextScale',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTextboxTightWrap',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTrPrChange',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTrackChangeNumbering',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTrackChangesView',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTTwipsMeasure',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTView',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTWriteProtection',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.CTWritingStyle',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STDateTime',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STDisplacedByCustomXml',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STHeightRule',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STHint',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STPTabAlignment',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STPTabLeader',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STPTabRelativeTo',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STProofErr',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STShortHexNumber',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STThemeColor',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STUcharHexNumber',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.STZoom',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTAbstractNumImpl$1LvlList',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1AltChunkList',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1BookmarkEndList',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1BookmarkStartList',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CommentRangeEndList',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CommentRangeStartList',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlDelRangeEndList',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlDelRangeStartList',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlInsRangeEndList',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlInsRangeStartList',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlList',
+                 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlMoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlMoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlMoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1CustomXmlMoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1DelList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1InsList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveFromList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveToList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1MoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1OMathList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1OMathParaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1PList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1PermEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1PermStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1ProofErrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1SdtList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTBodyImpl$1TblList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1AltChunkList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1BookmarkEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1BookmarkStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CommentRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CommentRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlDelRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlDelRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlInsRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlInsRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlMoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlMoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlMoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1CustomXmlMoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1DelList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1InsList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveFromList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveToList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1MoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1OMathList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1OMathParaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1PList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1PermEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1PermStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1ProofErrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1SdtList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentImpl$1TblList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTCommentsImpl$1CommentList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTDrawingImpl$1AnchorList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTDrawingImpl$1InlineList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTEndnotesImpl$1EndnoteList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1CalcOnExitList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1DdListList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1EnabledList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1EntryMacroList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1ExitMacroList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1HelpTextList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1NameList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1StatusTextList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFFDataImpl$1TextInputList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFootnotesImpl$1FootnoteList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1AltChunkList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1BookmarkEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1BookmarkStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CommentRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CommentRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlDelRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlDelRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlInsRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlInsRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlMoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlMoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlMoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1CustomXmlMoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1DelList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1InsList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveFromList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveToList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1MoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1OMathList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1OMathParaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1PList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1PermEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1PermStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1ProofErrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1SdtList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTFtnEdnImpl$1TblList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1AltChunkList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1BookmarkEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1BookmarkStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CommentRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CommentRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlDelRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlDelRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlInsRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlInsRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlMoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlMoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlMoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1CustomXmlMoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1DelList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1InsList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveFromList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveToList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1MoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1OMathList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1OMathParaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1PList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1PermEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1PermStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1ProofErrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1SdtList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHdrFtrImpl$1TblList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1BookmarkEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1BookmarkStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CommentRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CommentRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlDelRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlDelRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlInsRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlInsRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlMoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlMoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlMoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1CustomXmlMoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1DelList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1FldSimpleList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1HyperlinkList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1InsList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveFromList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveToList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1MoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1OMathList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1OMathParaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1PermEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1PermStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1ProofErrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1RList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1SdtList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1SmartTagList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTHyperlinkImpl$1SubDocList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTLatentStylesImpl$1LsdExceptionList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumImpl$1LvlOverrideList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumberingImpl$1AbstractNumList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumberingImpl$1NumList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTNumberingImpl$1NumPicBulletList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1BookmarkEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CommentRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CommentRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlDelRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlDelRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlInsRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlInsRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlMoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlMoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlMoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1CustomXmlMoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1DelList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1FldSimpleList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1HyperlinkList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1InsList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveFromList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveToList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1MoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1OMathList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1OMathParaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1PermEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1PermStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1ProofErrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1RList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1SdtList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1SmartTagList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTPImpl$1SubDocList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1AnnotationRefList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1BrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1CommentReferenceList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1ContinuationSeparatorList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1CrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DayLongList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DayShortList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DelInstrTextList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DelTextList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1DrawingList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1EndnoteRefList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1EndnoteReferenceList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1FldCharList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1FootnoteRefList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1FootnoteReferenceList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1InstrTextList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1LastRenderedPageBreakList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1MonthLongList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1MonthShortList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1NoBreakHyphenList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1ObjectList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1PgNumList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1PictList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1PtabList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1RubyList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1SeparatorList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1SoftHyphenList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1SymList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1TList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1TabList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1YearLongList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRImpl$1YearShortList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1BookmarkEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1BookmarkStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CommentRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CommentRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlDelRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlDelRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlInsRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlInsRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlMoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlMoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlMoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1CustomXmlMoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1DelList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1InsList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveFromList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveToList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1MoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1OMathList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1OMathParaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1PermEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1PermStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1ProofErrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1SdtList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRowImpl$1TcList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1AccList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BarList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BookmarkEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BookmarkStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BorderBoxList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1BoxList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CommentRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CommentRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlDelRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlDelRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlInsRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlInsRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlMoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlMoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlMoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1CustomXmlMoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1DList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1DelList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1EqArrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1FList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1FuncList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1GroupChrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1InsList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1LimLowList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1LimUppList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveFromList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveToList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1MoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1NaryList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1OMathList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1OMathParaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1PermEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1PermStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1PhantList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1ProofErrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1R2List',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1RList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1RadList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SPreList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SSubList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SSubSupList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SSupList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SdtList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTRunTrackChangeImpl$1SmartTagList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1BookmarkEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1BookmarkStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CommentRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CommentRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlDelRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlDelRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlInsRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlInsRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlMoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlMoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlMoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1CustomXmlMoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1DelList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1InsList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveFromList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveToList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1MoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1OMathList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1OMathParaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1PList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1PermEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1PermStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1ProofErrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1SdtList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentBlockImpl$1TblList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1BookmarkEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1BookmarkStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CommentRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CommentRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlDelRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlDelRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlInsRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlInsRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlMoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlMoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlMoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1CustomXmlMoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1DelList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1InsList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveFromList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveToList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1MoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1OMathList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1OMathParaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1PermEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1PermStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1ProofErrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1SdtList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentCellImpl$1TcList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1BookmarkEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1BookmarkStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CommentRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CommentRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlDelRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlDelRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlInsRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlInsRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlMoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlMoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlMoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1CustomXmlMoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1DelList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1FldSimpleList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1HyperlinkList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1InsList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveFromList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveToList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1MoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1OMathList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1OMathParaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1PermEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1PermStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1ProofErrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1RList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1SdtList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1SmartTagList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtContentRunImpl$1SubDocList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtEndPrImpl$1RPrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1AliasList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1BibliographyList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1CitationList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1ComboBoxList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DataBindingList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DateList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DocPartListList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DocPartObjList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1DropDownListList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1EquationList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1GroupList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1IdList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1LockList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1PictureList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1PlaceholderList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1RPrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1RichTextList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1ShowingPlcHdrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1TagList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1TemporaryList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSdtPrImpl$1TextList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSectPrImpl$1FooterReferenceList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSectPrImpl$1HeaderReferenceList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSettingsImpl$1ActiveWritingStyleList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSettingsImpl$1AttachedSchemaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSettingsImpl$1SmartTagTypeList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1BookmarkEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1BookmarkStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CommentRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CommentRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlDelRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlDelRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlInsRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlInsRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlMoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlMoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlMoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1CustomXmlMoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1DelList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1FldSimpleList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1HyperlinkList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1InsList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveFromList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveToList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1MoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1OMathList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1OMathParaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1PermEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1PermStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1ProofErrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1RList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1SdtList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1SmartTagList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSimpleFieldImpl$1SubDocList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagPrImpl$1AttrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1BookmarkEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1BookmarkStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CommentRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CommentRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlDelRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlDelRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlInsRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlInsRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlMoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlMoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlMoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1CustomXmlMoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1DelList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1FldSimpleList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1HyperlinkList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1InsList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveFromList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveToList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1MoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1OMathList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1OMathParaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1PermEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1PermStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1ProofErrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1RList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1SdtList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1SmartTagList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTSmartTagRunImpl$1SubDocList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTStyleImpl$1TblStylePrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTStylesImpl$1StyleList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTabsImpl$1TabList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblGridBaseImpl$1GridColList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1BookmarkEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1BookmarkStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CommentRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CommentRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlDelRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlDelRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlInsRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlInsRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlMoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlMoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlMoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1CustomXmlMoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1DelList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1InsList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveFromList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveToList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1MoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1OMathList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1OMathParaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1PermEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1PermStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1ProofErrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1SdtList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTblImpl$1TrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1AltChunkList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1BookmarkEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1BookmarkStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CommentRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CommentRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlDelRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlDelRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlInsRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlInsRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlMoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlMoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlMoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1CustomXmlMoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1DelList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1InsList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveFromList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveToList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1MoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1OMathList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1OMathParaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1PList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1PermEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1PermStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1ProofErrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1SdtList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTcImpl$1TblList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1CantSplitList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1CnfStyleList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1DivIdList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1GridAfterList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1GridBeforeList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1HiddenList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1JcList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1TblCellSpacingList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1TblHeaderList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1TrHeightList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1WAfterList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTrPrBaseImpl$1WBeforeList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1AltChunkList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1BookmarkEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1BookmarkStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CommentRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CommentRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlDelRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlDelRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlInsRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlInsRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlMoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlMoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlMoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1CustomXmlMoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1DelList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1InsList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveFromList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveFromRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveFromRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveToList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveToRangeEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1MoveToRangeStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1OMathList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1OMathParaList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1PList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1PermEndList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1PermStartList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1ProofErrList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1SdtList',
+ 'org.openxmlformats.schemas.wordprocessingml.x2006.main.impl.CTTxbxContentImpl$1TblList',
+ 'org.osgi.framework.BundleActivator',
+ 'org.osgi.framework.BundleContext',
+ 'org.osgi.framework.ServiceReference',
+ 'org.osgi.framework.ServiceRegistration',
+ 'org.osgi.util.tracker.ServiceTracker',
+ 'org.osgi.util.tracker.ServiceTrackerCustomizer',
+ 'org.slf4j.Logger',
+ 'org.slf4j.LoggerFactory',
+ 'org.sqlite.SQLiteConfig',
+ 'org.tukaani.xz.ARMOptions',
+ 'org.tukaani.xz.ARMThumbOptions',
+ 'org.tukaani.xz.DeltaOptions',
+ 'org.tukaani.xz.FilterOptions',
+ 'org.tukaani.xz.FinishableWrapperOutputStream',
+ 'org.tukaani.xz.IA64Options',
+ 'org.tukaani.xz.LZMA2InputStream',
+ 'org.tukaani.xz.LZMA2Options',
+ 'org.tukaani.xz.LZMAInputStream',
+ 'org.tukaani.xz.PowerPCOptions',
+ 'org.tukaani.xz.SPARCOptions',
+ 'org.tukaani.xz.SingleXZInputStream',
+ 'org.tukaani.xz.UnsupportedOptionsException',
+ 'org.tukaani.xz.X86Options',
+ 'org.tukaani.xz.XZ',
+ 'org.tukaani.xz.XZInputStream',
+ 'org.tukaani.xz.XZOutputStream',
+ 'org.w3.x2000.x09.xmldsig.KeyInfoType',
+ 'org.w3.x2000.x09.xmldsig.SignatureMethodType',
+ 'org.w3.x2000.x09.xmldsig.SignatureValueType',
+ 'org.w3.x2000.x09.xmldsig.TransformsType',
+ 'org.w3.x2000.x09.xmldsig.impl.SignatureTypeImpl$1ObjectList',
+ 'org.w3.x2000.x09.xmldsig.impl.SignedInfoTypeImpl$1ReferenceList',
+ 'org.w3.x2000.x09.xmldsig.impl.TransformTypeImpl$1XPathList',
+ 'org.w3.x2000.x09.xmldsig.impl.TransformTypeImpl$2XPathList',
+ 'ucar.ma2.DataType',
+ 'ucar.nc2.Attribute',
+ 'ucar.nc2.Dimension',
+ 'ucar.nc2.Group',
+ 'ucar.nc2.NetcdfFile',
+ 'ucar.nc2.Variable',
+ 'ucar.nc2.dataset.NetcdfDataset'
 ]
diff --git a/plugins/mapper-attachments/licenses/bcmail-jdk15on-1.52.jar.sha1 b/plugins/mapper-attachments/licenses/bcmail-jdk15on-1.52.jar.sha1
deleted file mode 100644
index de084c948f4..00000000000
--- a/plugins/mapper-attachments/licenses/bcmail-jdk15on-1.52.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4995a870400e1554d1c7ed2afcb5d198fae12db9
diff --git a/plugins/mapper-attachments/licenses/bcmail-jdk15on-1.54.jar.sha1 b/plugins/mapper-attachments/licenses/bcmail-jdk15on-1.54.jar.sha1
new file mode 100644
index 00000000000..79da45c5c42
--- /dev/null
+++ b/plugins/mapper-attachments/licenses/bcmail-jdk15on-1.54.jar.sha1
@@ -0,0 +1 @@
+9d9b5432b4b29ef4a853223bc6e19379ef116cca
\ No newline at end of file
diff --git a/plugins/mapper-attachments/licenses/bcpkix-jdk15on-1.52.jar.sha1 b/plugins/mapper-attachments/licenses/bcpkix-jdk15on-1.52.jar.sha1
deleted file mode 100644
index 489ceeaaf36..00000000000
--- a/plugins/mapper-attachments/licenses/bcpkix-jdk15on-1.52.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b8ffac2bbc6626f86909589c8cc63637cc936504 diff --git a/plugins/mapper-attachments/licenses/bcpkix-jdk15on-1.54.jar.sha1 b/plugins/mapper-attachments/licenses/bcpkix-jdk15on-1.54.jar.sha1 new file mode 100644 index 00000000000..2d0c3cf4e27 --- /dev/null +++ b/plugins/mapper-attachments/licenses/bcpkix-jdk15on-1.54.jar.sha1 @@ -0,0 +1 @@ +b11bfee99bb11eea344de6e4a07fe89212c55c02 \ No newline at end of file diff --git a/plugins/mapper-attachments/licenses/bcprov-jdk15on-1.52.jar.sha1 b/plugins/mapper-attachments/licenses/bcprov-jdk15on-1.52.jar.sha1 deleted file mode 100644 index 14ecc1be40b..00000000000 --- a/plugins/mapper-attachments/licenses/bcprov-jdk15on-1.52.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -88a941faf9819d371e3174b5ed56a3f3f7d73269 diff --git a/plugins/mapper-attachments/licenses/bcprov-jdk15on-1.54.jar.sha1 b/plugins/mapper-attachments/licenses/bcprov-jdk15on-1.54.jar.sha1 new file mode 100644 index 00000000000..fcda646b42a --- /dev/null +++ b/plugins/mapper-attachments/licenses/bcprov-jdk15on-1.54.jar.sha1 @@ -0,0 +1 @@ +1acdedeb89f1d950d67b73d481eb7736df65eedb \ No newline at end of file diff --git a/plugins/mapper-attachments/licenses/fontbox-1.8.10.jar.sha1 b/plugins/mapper-attachments/licenses/fontbox-1.8.10.jar.sha1 deleted file mode 100644 index ce7f9f5d49c..00000000000 --- a/plugins/mapper-attachments/licenses/fontbox-1.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -41776c7713e3f3a1ce688bd96459fc597298c340 diff --git a/plugins/mapper-attachments/licenses/fontbox-2.0.1.jar.sha1 b/plugins/mapper-attachments/licenses/fontbox-2.0.1.jar.sha1 new file mode 100644 index 00000000000..0668199b242 --- /dev/null +++ b/plugins/mapper-attachments/licenses/fontbox-2.0.1.jar.sha1 @@ -0,0 +1 @@ +b9d4f0993e015f3f1ce0be9e7300cf62dd7a7f15 \ No newline at end of file diff --git a/plugins/mapper-attachments/licenses/jempbox-1.8.10.jar.sha1 b/plugins/mapper-attachments/licenses/jempbox-1.8.10.jar.sha1 deleted file mode 100644 index 5a7b1997208..00000000000 --- a/plugins/mapper-attachments/licenses/jempbox-1.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -40df4e4ca884aadc20b82d5abd0a3679774c55a6 diff --git a/plugins/mapper-attachments/licenses/jempbox-1.8.12.jar.sha1 b/plugins/mapper-attachments/licenses/jempbox-1.8.12.jar.sha1 new file mode 100644 index 00000000000..0e3dcf4573b --- /dev/null +++ b/plugins/mapper-attachments/licenses/jempbox-1.8.12.jar.sha1 @@ -0,0 +1 @@ +426450c573c19f6f2c751a7a52c11931b712c9f6 \ No newline at end of file diff --git a/plugins/mapper-attachments/licenses/pdfbox-1.8.10.jar.sha1 b/plugins/mapper-attachments/licenses/pdfbox-1.8.10.jar.sha1 deleted file mode 100644 index 98ce1f9d98c..00000000000 --- a/plugins/mapper-attachments/licenses/pdfbox-1.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bc5d1254495be36d0a3b3d6c35f88d05200b9311 diff --git a/plugins/mapper-attachments/licenses/pdfbox-2.0.1.jar.sha1 b/plugins/mapper-attachments/licenses/pdfbox-2.0.1.jar.sha1 new file mode 100644 index 00000000000..1014db34044 --- /dev/null +++ b/plugins/mapper-attachments/licenses/pdfbox-2.0.1.jar.sha1 @@ -0,0 +1 @@ +dbc69649118b7eff278f228c070a40ee559e1f62 \ No newline at end of file diff --git a/plugins/mapper-attachments/licenses/poi-3.13.jar.sha1 b/plugins/mapper-attachments/licenses/poi-3.13.jar.sha1 deleted file mode 100644 index 09063c1e5e0..00000000000 --- a/plugins/mapper-attachments/licenses/poi-3.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0f59f504ba8c521e61e25f417ec652fd485010f3 \ No newline at end of file diff --git a/plugins/mapper-attachments/licenses/poi-3.15-beta1.jar.sha1 
b/plugins/mapper-attachments/licenses/poi-3.15-beta1.jar.sha1 new file mode 100644 index 00000000000..6049604dd97 --- /dev/null +++ b/plugins/mapper-attachments/licenses/poi-3.15-beta1.jar.sha1 @@ -0,0 +1 @@ +048bb8326b81323631d9ceb4236cfbd382e56da2 \ No newline at end of file diff --git a/plugins/mapper-attachments/licenses/poi-ooxml-3.13.jar.sha1 b/plugins/mapper-attachments/licenses/poi-ooxml-3.13.jar.sha1 deleted file mode 100644 index 16784299855..00000000000 --- a/plugins/mapper-attachments/licenses/poi-ooxml-3.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c364a8f5422d613e3a56db3b4b889f2989d7ee73 \ No newline at end of file diff --git a/plugins/mapper-attachments/licenses/poi-ooxml-3.15-beta1.jar.sha1 b/plugins/mapper-attachments/licenses/poi-ooxml-3.15-beta1.jar.sha1 new file mode 100644 index 00000000000..c3cf49d9246 --- /dev/null +++ b/plugins/mapper-attachments/licenses/poi-ooxml-3.15-beta1.jar.sha1 @@ -0,0 +1 @@ +81085a47fdf0d74d473d605c6b3784e26731842e \ No newline at end of file diff --git a/plugins/mapper-attachments/licenses/poi-ooxml-schemas-3.13.jar.sha1 b/plugins/mapper-attachments/licenses/poi-ooxml-schemas-3.13.jar.sha1 deleted file mode 100644 index b5a3a05c489..00000000000 --- a/plugins/mapper-attachments/licenses/poi-ooxml-schemas-3.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -56fb0b9f3ffc3d7f7fc9b59e17b5fa2c3ab921e7 \ No newline at end of file diff --git a/plugins/mapper-attachments/licenses/poi-ooxml-schemas-3.15-beta1.jar.sha1 b/plugins/mapper-attachments/licenses/poi-ooxml-schemas-3.15-beta1.jar.sha1 new file mode 100644 index 00000000000..afd3b676d08 --- /dev/null +++ b/plugins/mapper-attachments/licenses/poi-ooxml-schemas-3.15-beta1.jar.sha1 @@ -0,0 +1 @@ +f8bc979ad79908a99483337f1ca2edf78558ac20 \ No newline at end of file diff --git a/plugins/mapper-attachments/licenses/poi-scratchpad-3.13.jar.sha1 b/plugins/mapper-attachments/licenses/poi-scratchpad-3.13.jar.sha1 deleted file mode 100644 index cc61780e2a5..00000000000 --- a/plugins/mapper-attachments/licenses/poi-scratchpad-3.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -09d763275e6c7fa05d47e2581606748669e88c55 \ No newline at end of file diff --git a/plugins/mapper-attachments/licenses/poi-scratchpad-3.15-beta1.jar.sha1 b/plugins/mapper-attachments/licenses/poi-scratchpad-3.15-beta1.jar.sha1 new file mode 100644 index 00000000000..7056a9fa49e --- /dev/null +++ b/plugins/mapper-attachments/licenses/poi-scratchpad-3.15-beta1.jar.sha1 @@ -0,0 +1 @@ +f4e276aaf97a60a1156388c9e38069122b7ea914 \ No newline at end of file diff --git a/plugins/mapper-attachments/licenses/tika-core-1.11.jar.sha1 b/plugins/mapper-attachments/licenses/tika-core-1.11.jar.sha1 deleted file mode 100644 index a6dfd778a9c..00000000000 --- a/plugins/mapper-attachments/licenses/tika-core-1.11.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d37a6b9080c8361e47b2050f69833fd61501ede9 \ No newline at end of file diff --git a/plugins/mapper-attachments/licenses/tika-core-1.13.jar.sha1 b/plugins/mapper-attachments/licenses/tika-core-1.13.jar.sha1 new file mode 100644 index 00000000000..cfc36a450bd --- /dev/null +++ b/plugins/mapper-attachments/licenses/tika-core-1.13.jar.sha1 @@ -0,0 +1 @@ +1305c798d41d1d7bbf12cb7c0ca184c98eed25ad \ No newline at end of file diff --git a/plugins/mapper-attachments/licenses/tika-parsers-1.11.jar.sha1 b/plugins/mapper-attachments/licenses/tika-parsers-1.11.jar.sha1 deleted file mode 100644 index fbbd59efaf9..00000000000 --- a/plugins/mapper-attachments/licenses/tika-parsers-1.11.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-355dc05d842ed223fc682da472229473ba706d68 \ No newline at end of file diff --git a/plugins/mapper-attachments/licenses/tika-parsers-1.13.jar.sha1 b/plugins/mapper-attachments/licenses/tika-parsers-1.13.jar.sha1 new file mode 100644 index 00000000000..7fb2755d545 --- /dev/null +++ b/plugins/mapper-attachments/licenses/tika-parsers-1.13.jar.sha1 @@ -0,0 +1 @@ +374fde67b9d35f785534b0e6c4953533c31bab5f \ No newline at end of file diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java index f701c3282cc..a5f87f57ac0 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MultifieldAttachmentMapperTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.mapper.attachments; -import org.elasticsearch.common.Base64; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -33,6 +32,7 @@ import org.elasticsearch.index.mapper.core.TextFieldMapper; import org.junit.Before; import java.nio.charset.StandardCharsets; +import java.util.Base64; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.hamcrest.Matchers.instanceOf; @@ -84,7 +84,7 @@ public class MultifieldAttachmentMapperTests extends AttachmentUnitTestCase { String originalText = "This is an elasticsearch mapper attachment test."; String forcedName = "dummyname.txt"; - String bytes = Base64.encodeBytes(originalText.getBytes(StandardCharsets.ISO_8859_1)); + String bytes = Base64.getEncoder().encodeToString(originalText.getBytes(StandardCharsets.ISO_8859_1)); MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()); @@ -150,7 +150,7 @@ public class MultifieldAttachmentMapperTests extends AttachmentUnitTestCase { String forcedLanguage = randomAsciiOfLength(20); String forcedContentType = randomAsciiOfLength(20); - String bytes = Base64.encodeBytes(originalText.getBytes(StandardCharsets.ISO_8859_1)); + String bytes = Base64.getEncoder().encodeToString(originalText.getBytes(StandardCharsets.ISO_8859_1)); MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.builder().put(AttachmentMapper.INDEX_ATTACHMENT_DETECT_LANGUAGE_SETTING.getKey(), true).build(), diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobContainer.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobContainer.java index 6f4654161ee..e6c3a469076 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobContainer.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobContainer.java @@ -52,11 +52,7 @@ public class AzureBlobContainer extends AbstractBlobContainer { public AzureBlobContainer(String repositoryName, BlobPath path, AzureBlobStore blobStore) { super(path); this.blobStore = blobStore; - String keyPath = path.buildAsString("/"); - if (!keyPath.isEmpty()) { - keyPath = keyPath + "/"; - } - this.keyPath = keyPath; + this.keyPath = path.buildAsString(); this.repositoryName = repositoryName; } 
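The MultifieldAttachmentMapperTests change above drops the project's own Base64 helper in favor of the JDK's java.util.Base64, which has shipped since Java 8. A minimal, self-contained sketch of the equivalent round trip (the class and variable names here are illustrative only, not part of the patch):

----
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class Base64RoundTrip {
    public static void main(String[] args) {
        byte[] raw = "This is an elasticsearch mapper attachment test."
                .getBytes(StandardCharsets.ISO_8859_1);

        // Stands in for the removed org.elasticsearch.common.Base64.encodeBytes(raw)
        String encoded = Base64.getEncoder().encodeToString(raw);

        // The JDK decoder reverses the encoding, so the bytes survive intact
        byte[] decoded = Base64.getDecoder().decode(encoded);

        System.out.println(encoded);
        System.out.println(new String(decoded, StandardCharsets.ISO_8859_1));
    }
}
----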
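The AzureBlobContainer hunk above is the first of several (AzureBlobStore, the Google Cloud Storage classes, and the S3 classes further down) that delete the same copy-pasted trailing-slash logic, which implies BlobPath.buildAsString() now takes no separator argument and appends the trailing separator itself. A hedged sketch of that assumed contract follows; the field names and separator constant are illustrative, not the actual Elasticsearch implementation:

----
import java.util.ArrayList;
import java.util.List;

// Hypothetical, simplified stand-in for org.elasticsearch.common.blobstore.BlobPath.
final class BlobPathSketch {
    private static final String SEPARATOR = "/";
    private final List<String> paths = new ArrayList<>();

    BlobPathSketch add(String element) {
        paths.add(element);
        return this;
    }

    // Assumed contract after the refactor: a non-empty path renders with a
    // trailing separator ("indices/1/0/") and the empty path renders as "",
    // matching the if (!keyPath.isEmpty()) { keyPath += "/"; } logic that
    // each caller just deleted.
    String buildAsString() {
        if (paths.isEmpty()) {
            return "";
        }
        return String.join(SEPARATOR, paths) + SEPARATOR;
    }
}
----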
diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java index 85baf00b909..2809b8588f1 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java @@ -85,11 +85,7 @@ public class AzureBlobStore extends AbstractComponent implements BlobStore { @Override public void delete(BlobPath path) { - String keyPath = path.buildAsString("/"); - if (!keyPath.isEmpty()) { - keyPath = keyPath + "/"; - } - + String keyPath = path.buildAsString(); try { this.client.deleteFiles(this.accountName, this.locMode, container, keyPath); } catch (URISyntaxException | StorageException e) { diff --git a/plugins/repository-gcs/licenses/httpclient-4.3.6.jar.sha1 b/plugins/repository-gcs/licenses/httpclient-4.3.6.jar.sha1 deleted file mode 100644 index 2c18ef0f54c..00000000000 --- a/plugins/repository-gcs/licenses/httpclient-4.3.6.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4c47155e3e6c9a41a28db36680b828ced53b8af4 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/httpclient-4.5.2.jar.sha1 b/plugins/repository-gcs/licenses/httpclient-4.5.2.jar.sha1 new file mode 100644 index 00000000000..6937112a09f --- /dev/null +++ b/plugins/repository-gcs/licenses/httpclient-4.5.2.jar.sha1 @@ -0,0 +1 @@ +733db77aa8d9b2d68015189df76ab06304406e50 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/httpcore-4.3.3.jar.sha1 b/plugins/repository-gcs/licenses/httpcore-4.3.3.jar.sha1 deleted file mode 100644 index 0ad1d24aa9f..00000000000 --- a/plugins/repository-gcs/licenses/httpcore-4.3.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f91b7a4aadc5cf486df6e4634748d7dd7a73f06d \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/httpcore-4.4.4.jar.sha1 b/plugins/repository-gcs/licenses/httpcore-4.4.4.jar.sha1 new file mode 100644 index 00000000000..ef0c257e012 --- /dev/null +++ b/plugins/repository-gcs/licenses/httpcore-4.4.4.jar.sha1 @@ -0,0 +1 @@ +b31526a230871fbe285fbcbe2813f9c0839ae9b0 \ No newline at end of file diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobContainer.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobContainer.java index d8117180ce3..0387187eef9 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobContainer.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobContainer.java @@ -42,13 +42,7 @@ public class GoogleCloudStorageBlobContainer extends AbstractBlobContainer { GoogleCloudStorageBlobContainer(BlobPath path, GoogleCloudStorageBlobStore blobStore) { super(path); this.blobStore = blobStore; - - String keyPath = path.buildAsString("/"); - // TODO Move this keyPath logic to the buildAsString() method - if (!keyPath.isEmpty()) { - keyPath = keyPath + "/"; - } - this.path = keyPath; + this.path = path.buildAsString(); } @Override diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStore.java index 7bf79494440..6ff5aa41819 100644 --- 
a/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/common/blobstore/gcs/GoogleCloudStorageBlobStore.java @@ -88,12 +88,7 @@ public class GoogleCloudStorageBlobStore extends AbstractComponent implements Bl @Override public void delete(BlobPath path) throws IOException { - String keyPath = path.buildAsString("/"); - // TODO Move this keyPath logic to the buildAsString() method - if (!keyPath.isEmpty()) { - keyPath = keyPath + "/"; - } - deleteBlobsByPrefix(keyPath); + deleteBlobsByPrefix(path.buildAsString()); } @Override diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index a083309891e..a7b8d8233eb 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -18,7 +18,7 @@ */ esplugin { - description 'The S3 repository plugin adds S3 repositories.' + description 'The S3 repository plugin adds S3 repositories' classname 'org.elasticsearch.plugin.repository.s3.S3RepositoryPlugin' } @@ -36,11 +36,16 @@ dependencies { compile "commons-codec:commons-codec:${versions.commonscodec}" compile "com.fasterxml.jackson.core:jackson-databind:2.5.3" compile "com.fasterxml.jackson.core:jackson-annotations:2.5.0" + + // HACK: javax.xml.bind was removed from default modules in java 9, so we pull the api in here, + // and whitelist this hack in JarHell + compile 'javax.xml.bind:jaxb-api:2.2.2' } dependencyLicenses { mapping from: /aws-java-sdk-.*/, to: 'aws-java-sdk' mapping from: /jackson-.*/, to: 'jackson' + mapping from: /jaxb-.*/, to: 'jaxb' } test { @@ -60,4 +65,107 @@ thirdPartyAudit.excludes = [ 'org.apache.avalon.framework.logger.Logger', 'org.apache.log.Hierarchy', 'org.apache.log.Logger', + + // jarhell with jdk (intentionally, because jaxb was removed from default modules in java 9) + 'javax.xml.bind.Binder', + 'javax.xml.bind.ContextFinder$1', + 'javax.xml.bind.ContextFinder', + 'javax.xml.bind.DataBindingException', + 'javax.xml.bind.DatatypeConverter', + 'javax.xml.bind.DatatypeConverterImpl$CalendarFormatter', + 'javax.xml.bind.DatatypeConverterImpl', + 'javax.xml.bind.DatatypeConverterInterface', + 'javax.xml.bind.Element', + 'javax.xml.bind.GetPropertyAction', + 'javax.xml.bind.JAXB$Cache', + 'javax.xml.bind.JAXB', + 'javax.xml.bind.JAXBContext', + 'javax.xml.bind.JAXBElement$GlobalScope', + 'javax.xml.bind.JAXBElement', + 'javax.xml.bind.JAXBException', + 'javax.xml.bind.JAXBIntrospector', + 'javax.xml.bind.JAXBPermission', + 'javax.xml.bind.MarshalException', + 'javax.xml.bind.Marshaller$Listener', + 'javax.xml.bind.Marshaller', + 'javax.xml.bind.Messages', + 'javax.xml.bind.NotIdentifiableEvent', + 'javax.xml.bind.ParseConversionEvent', + 'javax.xml.bind.PrintConversionEvent', + 'javax.xml.bind.PropertyException', + 'javax.xml.bind.SchemaOutputResolver', + 'javax.xml.bind.TypeConstraintException', + 'javax.xml.bind.UnmarshalException', + 'javax.xml.bind.Unmarshaller$Listener', + 'javax.xml.bind.Unmarshaller', + 'javax.xml.bind.UnmarshallerHandler', + 'javax.xml.bind.ValidationEvent', + 'javax.xml.bind.ValidationEventHandler', + 'javax.xml.bind.ValidationEventLocator', + 'javax.xml.bind.ValidationException', + 'javax.xml.bind.Validator', + 'javax.xml.bind.WhiteSpaceProcessor', + 'javax.xml.bind.annotation.DomHandler', + 'javax.xml.bind.annotation.W3CDomHandler', + 'javax.xml.bind.annotation.XmlAccessOrder', + 'javax.xml.bind.annotation.XmlAccessType', + 'javax.xml.bind.annotation.XmlAccessorOrder', + 
'javax.xml.bind.annotation.XmlAccessorType', + 'javax.xml.bind.annotation.XmlAnyAttribute', + 'javax.xml.bind.annotation.XmlAnyElement', + 'javax.xml.bind.annotation.XmlAttachmentRef', + 'javax.xml.bind.annotation.XmlAttribute', + 'javax.xml.bind.annotation.XmlElement$DEFAULT', + 'javax.xml.bind.annotation.XmlElement', + 'javax.xml.bind.annotation.XmlElementDecl$GLOBAL', + 'javax.xml.bind.annotation.XmlElementDecl', + 'javax.xml.bind.annotation.XmlElementRef$DEFAULT', + 'javax.xml.bind.annotation.XmlElementRef', + 'javax.xml.bind.annotation.XmlElementRefs', + 'javax.xml.bind.annotation.XmlElementWrapper', + 'javax.xml.bind.annotation.XmlElements', + 'javax.xml.bind.annotation.XmlEnum', + 'javax.xml.bind.annotation.XmlEnumValue', + 'javax.xml.bind.annotation.XmlID', + 'javax.xml.bind.annotation.XmlIDREF', + 'javax.xml.bind.annotation.XmlInlineBinaryData', + 'javax.xml.bind.annotation.XmlList', + 'javax.xml.bind.annotation.XmlMimeType', + 'javax.xml.bind.annotation.XmlMixed', + 'javax.xml.bind.annotation.XmlNs', + 'javax.xml.bind.annotation.XmlNsForm', + 'javax.xml.bind.annotation.XmlRegistry', + 'javax.xml.bind.annotation.XmlRootElement', + 'javax.xml.bind.annotation.XmlSchema', + 'javax.xml.bind.annotation.XmlSchemaType$DEFAULT', + 'javax.xml.bind.annotation.XmlSchemaType', + 'javax.xml.bind.annotation.XmlSchemaTypes', + 'javax.xml.bind.annotation.XmlSeeAlso', + 'javax.xml.bind.annotation.XmlTransient', + 'javax.xml.bind.annotation.XmlType$DEFAULT', + 'javax.xml.bind.annotation.XmlType', + 'javax.xml.bind.annotation.XmlValue', + 'javax.xml.bind.annotation.adapters.CollapsedStringAdapter', + 'javax.xml.bind.annotation.adapters.HexBinaryAdapter', + 'javax.xml.bind.annotation.adapters.NormalizedStringAdapter', + 'javax.xml.bind.annotation.adapters.XmlAdapter', + 'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter$DEFAULT', + 'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter', + 'javax.xml.bind.annotation.adapters.XmlJavaTypeAdapters', + 'javax.xml.bind.attachment.AttachmentMarshaller', + 'javax.xml.bind.attachment.AttachmentUnmarshaller', + 'javax.xml.bind.helpers.AbstractMarshallerImpl', + 'javax.xml.bind.helpers.AbstractUnmarshallerImpl', + 'javax.xml.bind.helpers.DefaultValidationEventHandler', + 'javax.xml.bind.helpers.Messages', + 'javax.xml.bind.helpers.NotIdentifiableEventImpl', + 'javax.xml.bind.helpers.ParseConversionEventImpl', + 'javax.xml.bind.helpers.PrintConversionEventImpl', + 'javax.xml.bind.helpers.ValidationEventImpl', + 'javax.xml.bind.helpers.ValidationEventLocatorImpl', + 'javax.xml.bind.util.JAXBResult', + 'javax.xml.bind.util.JAXBSource$1', + 'javax.xml.bind.util.JAXBSource', + 'javax.xml.bind.util.Messages', + 'javax.xml.bind.util.ValidationEventCollector' ] diff --git a/plugins/repository-s3/licenses/httpclient-4.3.6.jar.sha1 b/plugins/repository-s3/licenses/httpclient-4.3.6.jar.sha1 deleted file mode 100644 index 3d35ee99d07..00000000000 --- a/plugins/repository-s3/licenses/httpclient-4.3.6.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4c47155e3e6c9a41a28db36680b828ced53b8af4 diff --git a/plugins/repository-s3/licenses/httpclient-4.5.2.jar.sha1 b/plugins/repository-s3/licenses/httpclient-4.5.2.jar.sha1 new file mode 100644 index 00000000000..6937112a09f --- /dev/null +++ b/plugins/repository-s3/licenses/httpclient-4.5.2.jar.sha1 @@ -0,0 +1 @@ +733db77aa8d9b2d68015189df76ab06304406e50 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/httpcore-4.3.3.jar.sha1 b/plugins/repository-s3/licenses/httpcore-4.3.3.jar.sha1 deleted file mode 100644 index 
5d9c0e26c09..00000000000 --- a/plugins/repository-s3/licenses/httpcore-4.3.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f91b7a4aadc5cf486df6e4634748d7dd7a73f06d diff --git a/plugins/repository-s3/licenses/httpcore-4.4.4.jar.sha1 b/plugins/repository-s3/licenses/httpcore-4.4.4.jar.sha1 new file mode 100644 index 00000000000..ef0c257e012 --- /dev/null +++ b/plugins/repository-s3/licenses/httpcore-4.4.4.jar.sha1 @@ -0,0 +1 @@ +b31526a230871fbe285fbcbe2813f9c0839ae9b0 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/jaxb-LICENSE.txt b/plugins/repository-s3/licenses/jaxb-LICENSE.txt new file mode 100644 index 00000000000..a3e62b08787 --- /dev/null +++ b/plugins/repository-s3/licenses/jaxb-LICENSE.txt @@ -0,0 +1,705 @@ +COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) Version 1.0 + +1. Definitions. + + 1.1. "Contributor" means each individual or entity that creates or +contributes to the creation of Modifications. + + 1.2. "Contributor Version" means the combination of the Original +Software, prior Modifications used by a Contributor (if any), and the +Modifications made by that particular Contributor. + + 1.3. "Covered Software" means (a) the Original Software, or (b) +Modifications, or (c) the combination of files containing Original +Software with files containing Modifications, in each case including +portions thereof. + + 1.4. "Executable" means the Covered Software in any form other than +Source Code. + + 1.5. "Initial Developer" means the individual or entity that first +makes Original Software available under this License. + + 1.6. "Larger Work" means a work which combines Covered Software or +portions thereof with code not governed by the terms of this License. + + 1.7. "License" means this document. + + 1.8. "Licensable" means having the right to grant, to the maximum +extent possible, whether at the time of the initial grant or +subsequently acquired, any and all of the rights conveyed herein. + + 1.9. "Modifications" means the Source Code and Executable form of any +of the following: + + A. Any file that results from an addition to, deletion from or +modification of the contents of a file containing Original Software or +previous Modifications; + + B. Any new file that contains any part of the Original Software +or previous Modification; or + + C. Any new file that is contributed or otherwise made available +under the terms of this License. + + 1.10. "Original Software" means the Source Code and Executable form of +computer software code that is originally released under this License. + + 1.11. "Patent Claims" means any patent claim(s), now owned or +hereafter acquired, including without limitation, method, process, and +apparatus claims, in any patent Licensable by grantor. + + 1.12. "Source Code" means (a) the common form of computer software +code in which modifications are made and (b) associated documentation +included in or with such code. + + 1.13. "You" (or "Your") means an individual or a legal entity +exercising rights under, and complying with all of the terms of, this +License. For legal entities, "You" includes any entity which controls, +is controlled by, or is under common control with You. For purposes of +this definition, "control" means (a) the power, direct or indirect, to +cause the direction or management of such entity, whether by contract or +otherwise, or (b) ownership of more than fifty percent (50%) of the +outstanding shares or beneficial ownership of such entity. + +2. License Grants. + + 2.1. The Initial Developer Grant.
+ + Conditioned upon Your compliance with Section 3.1 below and +subject to third party intellectual property claims, the Initial +Developer hereby grants You a world-wide, royalty-free, non-exclusive +license: + + (a) under intellectual property rights (other than patent or +trademark) Licensable by Initial Developer, to use, reproduce, modify, +display, perform, sublicense and distribute the Original Software (or +portions thereof), with or without Modifications, and/or as part of a +Larger Work; and + + (b) under Patent Claims infringed by the making, using or +selling of Original Software, to make, have made, use, practice, sell, +and offer for sale, and/or otherwise dispose of the Original Software +(or portions thereof). + + (c) The licenses granted in Sections 2.1(a) and (b) are +effective on the date Initial Developer first distributes or otherwise +makes the Original Software available to a third party under the terms +of this License. + + (d) Notwithstanding Section 2.1(b) above, no patent license is +granted: (1) for code that You delete from the Original Software, or (2) +for infringements caused by: (i) the modification of the Original +Software, or (ii) the combination of the Original Software with other +software or devices. + + 2.2. Contributor Grant. + + Conditioned upon Your compliance with Section 3.1 below and subject +to third party intellectual property claims, each Contributor hereby +grants You a world-wide, royalty-free, non-exclusive license: + + (a) under intellectual property rights (other than patent or +trademark) Licensable by Contributor to use, reproduce, modify, display, +perform, sublicense and distribute the Modifications created by such +Contributor (or portions thereof), either on an unmodified basis, with +other Modifications, as Covered Software and/or as part of a Larger Work; +and + + (b) under Patent Claims infringed by the making, using, or +selling of Modifications made by that Contributor either alone and/or in +combination with its Contributor Version (or portions of such +combination), to make, use, sell, offer for sale, have made, and/or +otherwise dispose of: (1) Modifications made by that Contributor (or +portions thereof); and (2) the combination of Modifications made by that +Contributor with its Contributor Version (or portions of such +combination). + + (c) The licenses granted in Sections 2.2(a) and 2.2(b) are +effective on the date Contributor first distributes or otherwise makes +the Modifications available to a third party. + + (d) Notwithstanding Section 2.2(b) above, no patent license is +granted: (1) for any code that Contributor has deleted from the +Contributor Version; (2) for infringements caused by: (i) third party +modifications of Contributor Version, or (ii) the combination of +Modifications made by that Contributor with other software (except as +part of the Contributor Version) or other devices; or (3) under Patent +Claims infringed by Covered Software in the absence of Modifications +made by that Contributor. + +3. Distribution Obligations. + + 3.1. Availability of Source Code. + Any Covered Software that You distribute or otherwise make +available in Executable form must also be made available in Source Code +form and that Source Code form must be distributed only under the terms +of this License. You must include a copy of this License with every copy +of the Source Code form of the Covered Software You distribute or +otherwise make available. 
You must inform recipients of any such Covered +Software in Executable form as to how they can obtain such Covered +Software in Source Code form in a reasonable manner on or through a +medium customarily used for software exchange. + + 3.2. Modifications. + The Modifications that You create or to which You contribute are +governed by the terms of this License. You represent that You believe +Your Modifications are Your original creation(s) and/or You have +sufficient rights to grant the rights conveyed by this License. + + 3.3. Required Notices. + You must include a notice in each of Your Modifications that +identifies You as the Contributor of the Modification. You may not +remove or alter any copyright, patent or trademark notices contained +within the Covered Software, or any notices of licensing or any +descriptive text giving attribution to any Contributor or the Initial +Developer. + + 3.4. Application of Additional Terms. + You may not offer or impose any terms on any Covered Software in +Source Code form that alters or restricts the applicable version of this +License or the recipients' rights hereunder. You may choose to offer, +and to charge a fee for, warranty, support, indemnity or liability +obligations to one or more recipients of Covered Software. However, you +may do so only on Your own behalf, and not on behalf of the Initial +Developer or any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity or liability obligation is offered by +You alone, and You hereby agree to indemnify the Initial Developer and +every Contributor for any liability incurred by the Initial Developer or +such Contributor as a result of warranty, support, indemnity or +liability terms You offer. + + 3.5. Distribution of Executable Versions. + You may distribute the Executable form of the Covered Software +under the terms of this License or under the terms of a license of Your +choice, which may contain terms different from this License, provided +that You are in compliance with the terms of this License and that the +license for the Executable form does not attempt to limit or alter the +recipient's rights in the Source Code form from the rights set forth in +this License. If You distribute the Covered Software in Executable form +under a different license, You must make it absolutely clear that any +terms which differ from this License are offered by You alone, not by +the Initial Developer or Contributor. You hereby agree to indemnify the +Initial Developer and every Contributor for any liability incurred by +the Initial Developer or such Contributor as a result of any such terms +You offer. + + 3.6. Larger Works. + You may create a Larger Work by combining Covered Software with +other code not governed by the terms of this License and distribute the +Larger Work as a single product. In such a case, You must make sure the +requirements of this License are fulfilled for the Covered Software. + +4. Versions of the License. + + 4.1. New Versions. + Sun Microsystems, Inc. is the initial license steward and may +publish revised and/or new versions of this License from time to time. +Each version will be given a distinguishing version number. Except as +provided in Section 4.3, no one other than the license steward has the +right to modify this License. + + 4.2. Effect of New Versions.
+ + You may always continue to use, distribute or otherwise make the +Covered Software available under the terms of the version of the License +under which You originally received the Covered Software. If the Initial +Developer includes a notice in the Original Software prohibiting it from +being distributed or otherwise made available under any subsequent +version of the License, You must distribute and make the Covered +Software available under the terms of the version of the License under +which You originally received the Covered Software. Otherwise, You may +also choose to use, distribute or otherwise make the Covered Software +available under the terms of any subsequent version of the License +published by the license steward. + + 4.3. Modified Versions. + When You are an Initial Developer and You want to create a new +license for Your Original Software, You may create and use a modified +version of this License if You: (a) rename the license and remove any +references to the name of the license steward (except to note that the +license differs from this License); and (b) otherwise make it clear that +the license contains terms which differ from this License. + +5. DISCLAIMER OF WARRANTY. + + COVERED SOFTWARE IS PROVIDED UNDER THIS LICENSE ON AN "AS IS" BASIS, +WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, +WITHOUT LIMITATION, WARRANTIES THAT THE COVERED SOFTWARE IS FREE OF +DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING. +THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED +SOFTWARE IS WITH YOU. SHOULD ANY COVERED SOFTWARE PROVE DEFECTIVE IN ANY +RESPECT, YOU (NOT THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME +THE COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS +DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO +USE OF ANY COVERED SOFTWARE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS +DISCLAIMER. + +6. TERMINATION. + + 6.1. This License and the rights granted hereunder will terminate +automatically if You fail to comply with terms herein and fail to cure +such breach within 30 days of becoming aware of the breach. Provisions +which, by their nature, must remain in effect beyond the termination of +this License shall survive. + + 6.2. If You assert a patent infringement claim (excluding +declaratory judgment actions) against Initial Developer or a Contributor +(the Initial Developer or Contributor against whom You assert such claim +is referred to as "Participant") alleging that the Participant Software +(meaning the Contributor Version where the Participant is a Contributor +or the Original Software where the Participant is the Initial Developer) +directly or indirectly infringes any patent, then any and all rights +granted directly or indirectly to You by such Participant, the Initial +Developer (if the Initial Developer is not the Participant) and all +Contributors under Sections 2.1 and/or 2.2 of this License shall, upon +60 days notice from Participant terminate prospectively and +automatically at the expiration of such 60 day notice period, unless if +within such 60 day period You withdraw Your claim with respect to the +Participant Software against such Participant either unilaterally or +pursuant to a written agreement with Participant. + + 6.3.
In the event of termination under Sections 6.1 or 6.2 above, +all end user licenses that have been validly granted by You or any +distributor hereunder prior to termination (excluding licenses granted +to You by any distributor) shall survive termination. + +7. LIMITATION OF LIABILITY. + + UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT +(INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE INITIAL +DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF COVERED SOFTWARE, +OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR ANY +INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY CHARACTER +INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOST PROFITS, LOSS OF +GOODWILL, WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL +OTHER COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN +INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF +LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY +RESULTING FROM SUCH PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW +PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION +OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO THIS EXCLUSION +AND LIMITATION MAY NOT APPLY TO YOU. + +8. U.S. GOVERNMENT END USERS. + + The Covered Software is a "commercial item," as that term is defined +in 48 C.F.R. 2.101 (Oct. 1995), consisting of "commercial computer +software" (as that term is defined at 48 C.F.R. § 252.227-7014(a)(1)) +and "commercial computer software documentation" as such terms are used +in 48 C.F.R. 12.212 (Sept. 1995). Consistent with 48 C.F.R. 12.212 and +48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995), all U.S. Government +End Users acquire Covered Software with only those rights set forth +herein. This U.S. Government Rights clause is in lieu of, and supersedes, +any other FAR, DFAR, or other clause or provision that addresses +Government rights in computer software under this License. + +9. MISCELLANEOUS. + + This License represents the complete agreement concerning subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. This License shall be governed by the +law of the jurisdiction specified in a notice contained within the +Original Software (except to the extent applicable law, if any, provides +otherwise), excluding such jurisdiction's conflict-of-law provisions. +Any litigation relating to this License shall be subject to the +jurisdiction of the courts located in the jurisdiction and venue +specified in a notice contained within the Original Software, with the +losing party responsible for costs, including, without limitation, court +costs and reasonable attorneys' fees and expenses. The application of +the United Nations Convention on Contracts for the International Sale of +Goods is expressly excluded. Any law or regulation which provides that +the language of a contract shall be construed against the drafter shall +not apply to this License. You agree that You alone are responsible for +compliance with the United States export administration regulations (and +the export control laws and regulation of any other countries) when You +use, distribute or otherwise make available any Covered Software. + +10. RESPONSIBILITY FOR CLAIMS.
+ + As between Initial Developer and the Contributors, each party is +responsible for claims and damages arising, directly or indirectly, out +of its utilization of rights under this License and You agree to work +with Initial Developer and Contributors to distribute such +responsibility on an equitable basis. Nothing herein is intended or +shall be deemed to constitute any admission of liability. + + NOTICE PURSUANT TO SECTION 9 OF THE COMMON DEVELOPMENT AND +DISTRIBUTION LICENSE (CDDL) + + The code released under the CDDL shall be governed by the laws of the +State of California (excluding conflict-of-law provisions). Any +litigation relating to this License shall be subject to the jurisdiction +of the Federal Courts of the Northern District of California and the +state courts of the State of California, with venue lying in Santa Clara +County, California. + + +The GNU General Public License (GPL) Version 2, June 1991 + + +Copyright (C) 1989, 1991 Free Software Foundation, Inc. 59 Temple Place, +Suite 330, Boston, MA 02111-1307 USA + +Everyone is permitted to copy and distribute verbatim copies of this +license document, but changing it is not allowed. + +Preamble + +The licenses for most software are designed to take away your freedom to +share and change it. By contrast, the GNU General Public License is +intended to guarantee your freedom to share and change free software--to +make sure the software is free for all its users. This General Public +License applies to most of the Free Software Foundation's software and +to any other program whose authors commit to using it. (Some other Free +Software Foundation software is covered by the GNU Library General +Public License instead.) You can apply it to your programs, too. + +When we speak of free software, we are referring to freedom, not price. +Our General Public Licenses are designed to make sure that you have the +freedom to distribute copies of free software (and charge for this +service if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs; and that you know you can do these things. + +To protect your rights, we need to make restrictions that forbid anyone +to deny you these rights or to ask you to surrender the rights. These +restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + +For example, if you distribute copies of such a program, whether gratis +or for a fee, you must give the recipients all the rights that you have. +You must make sure that they, too, receive or can get the source code. +And you must show them these terms so they know their rights. + +We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + +Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + +Finally, any free program is threatened constantly by software patents. +We wish to avoid the danger that redistributors of a free program will +individually obtain patent licenses, in effect making the program +proprietary. 
To prevent this, we have made it clear that any patent must +be licensed for everyone's free use or not licensed at all. + +The precise terms and conditions for copying, distribution and +modification follow. + + +TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + +0. This License applies to any program or other work which contains a +notice placed by the copyright holder saying it may be distributed under +the terms of this General Public License. The "Program", below, refers +to any such program or work, and a "work based on the Program" means +either the Program or any derivative work under copyright law: that is +to say, a work containing the Program or a portion of it, either +verbatim or with modifications and/or translated into another language. +(Hereinafter, translation is included without limitation in the term +"modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of running +the Program is not restricted, and the output from the Program is +covered only if its contents constitute a work based on the Program +(independent of having been made by running the Program). Whether that +is true depends on what the Program does. + +1. You may copy and distribute verbatim copies of the Program's source +code as you receive it, in any medium, provided that you conspicuously +and appropriately publish on each copy an appropriate copyright notice +and disclaimer of warranty; keep intact all the notices that refer to +this License and to the absence of any warranty; and give any other +recipients of the Program a copy of this License along with the Program. + +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + +2. You may modify your copy or copies of the Program or any portion of +it, thus forming a work based on the Program, and copy and distribute +such modifications or work under the terms of Section 1 above, provided +that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices +stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in +whole or in part contains or is derived from the Program or any part +thereof, to be licensed as a whole at no charge to all third parties +under the terms of this License. + + c) If the modified program normally reads commands interactively when +run, you must cause it, when started running for such interactive use in +the most ordinary way, to print or display an announcement including an +appropriate copyright notice and a notice that there is no warranty (or +else, saying that you provide a warranty) and that users may +redistribute the program under these conditions, and telling the user +how to view a copy of this License. (Exception: if the Program itself is +interactive but does not normally print such an announcement, your work +based on the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, and +can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. 
But when you +distribute the same sections as part of a whole which is a work based on +the Program, the distribution of the whole must be on the terms of this +License, whose permissions for other licensees extend to the entire +whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of a +storage or distribution medium does not bring the other work under the +scope of this License. + +3. You may copy and distribute the Program (or a work based on it, under +Section 2) in object code or executable form under the terms of Sections +1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable +source code, which must be distributed under the terms of Sections 1 and +2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three years, +to give any third party, for a charge no more than your cost of +physically performing source distribution, a complete machine-readable +copy of the corresponding source code, to be distributed under the terms +of Sections 1 and 2 above on a medium customarily used for software +interchange; or, + + c) Accompany it with the information you received as to the offer to +distribute corresponding source code. (This alternative is allowed only +for noncommercial distribution and only if you received the program in +object code or executable form with such an offer, in accord with +Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source code +means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to control +compilation and installation of the executable. However, as a special +exception, the source code distributed need not include anything that is +normally distributed (in either source or binary form) with the major +components (compiler, kernel, and so on) of the operating system on +which the executable runs, unless that component itself accompanies the +executable. + +If distribution of executable or object code is made by offering access +to copy from a designated place, then offering equivalent access to copy +the source code from the same place counts as distribution of the source +code, even though third parties are not compelled to copy the source +along with the object code. + +4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt otherwise +to copy, modify, sublicense or distribute the Program is void, and will +automatically terminate your rights under this License. However, parties +who have received copies, or rights, from you under this License will +not have their licenses terminated so long as such parties remain in +full compliance. + +5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. 
These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the Program), +you indicate your acceptance of this License to do so, and all its terms +and conditions for copying, distributing or modifying the Program or +works based on it. + +6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further restrictions +on the recipients' exercise of the rights granted herein. You are not +responsible for enforcing compliance by third parties to this License. + +7. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot distribute +so as to satisfy simultaneously your obligations under this License and +any other pertinent obligations, then as a consequence you may not +distribute the Program at all. For example, if a patent license would +not permit royalty-free redistribution of the Program by all those who +receive copies directly or indirectly through you, then the only way you +could satisfy both it and this License would be to refrain entirely from +distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is implemented +by public license practices. Many people have made generous +contributions to the wide range of software distributed through that +system in reliance on consistent application of that system; it is up to +the author/donor to decide if he or she is willing to distribute +software through any other system and a licensee cannot impose that +choice. + +This section is intended to make thoroughly clear what is believed to be +a consequence of the rest of this License. + +8. If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License may +add an explicit geographical distribution limitation excluding those +countries, so that distribution is permitted only in or among countries +not thus excluded. In such case, this License incorporates the +limitation as if written in the body of this License. + +9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. 
If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and +conditions either of that version or of any later version published by +the Free Software Foundation. If the Program does not specify a version +number of this License, you may choose any version ever published by the +Free Software Foundation. + +10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the +author to ask for permission. For software which is copyrighted by the +Free Software Foundation, write to the Free Software Foundation; we +sometimes make exceptions for this. Our decision will be guided by the +two goals of preserving the free status of all derivatives of our free +software and of promoting the sharing and reuse of software generally. + +NO WARRANTY + +11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER +EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE +ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. +SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY +SERVICING, REPAIR OR CORRECTION. + +12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN +WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY +AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR +DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL +DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM +(INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED +INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF +THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR +OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +END OF TERMS AND CONDITIONS + + +How to Apply These Terms to Your New Programs + +If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these +terms. + +To do so, attach the following notices to the program. It is safest to +attach them to the start of each source file to most effectively convey +the exclusion of warranty; and each file should have at least the +"copyright" line and a pointer to where the full notice is found. + + One line to give the program's name and a brief idea of what it does. + + Copyright (C) + + This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by the +Free Software Foundation; either version 2 of the License, or (at your +option) any later version. + + This program is distributed in the hope that it will be useful, but +WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General +Public License for more details. 
+ + You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software Foundation, +Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this +when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show +w'. This is free software, and you are welcome to redistribute it under +certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the +appropriate parts of the General Public License. Of course, the commands +you use may be called something other than `show w' and `show c'; they +could even be mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the +program `Gnomovision' (which makes passes at compilers) written by James +Hacker. + + signature of Ty Coon, 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications +with the library. If this is what you want to do, use the GNU Library +General Public License instead of this License. + + +"CLASSPATH" EXCEPTION TO THE GPL VERSION 2 + +Certain source files distributed by Sun Microsystems, Inc. are subject +to the following clarification and special exception to the GPL Version +2, but only where Sun has expressly included in the particular source +file's header the words + +"Sun designates this particular file as subject to the "Classpath" +exception as provided by Sun in the License file that accompanied this +code." + +Linking this library statically or dynamically with other modules is +making a combined work based on this library. Thus, the terms and +conditions of the GNU General Public License Version 2 cover the whole +combination. + +As a special exception, the copyright holders of this library give you +permission to link this library with independent modules to produce an +executable, regardless of the license terms of these independent modules, +and to copy and distribute the resulting executable under terms of your +choice, provided that you also meet, for each linked independent module, +the terms and conditions of the license of that module. An independent +module is a module which is not derived from or based on this library. +If you modify this library, you may extend this exception to your +version of the library, but you are not obligated to do so. If you do +not wish to do so, delete this exception statement from your version.
+ \ No newline at end of file diff --git a/plugins/repository-s3/licenses/jaxb-NOTICE.txt b/plugins/repository-s3/licenses/jaxb-NOTICE.txt new file mode 100644 index 00000000000..8d1c8b69c3f --- /dev/null +++ b/plugins/repository-s3/licenses/jaxb-NOTICE.txt @@ -0,0 +1 @@ + diff --git a/plugins/repository-s3/licenses/jaxb-api-2.2.2.jar.sha1 b/plugins/repository-s3/licenses/jaxb-api-2.2.2.jar.sha1 new file mode 100644 index 00000000000..a37e1872389 --- /dev/null +++ b/plugins/repository-s3/licenses/jaxb-api-2.2.2.jar.sha1 @@ -0,0 +1 @@ +aeb3021ca93dde265796d82015beecdcff95bf09 \ No newline at end of file diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobContainer.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobContainer.java index 4861ccc202b..42df840ce40 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobContainer.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobContainer.java @@ -54,11 +54,7 @@ public class S3BlobContainer extends AbstractBlobContainer { public S3BlobContainer(BlobPath path, S3BlobStore blobStore) { super(path); this.blobStore = blobStore; - String keyPath = path.buildAsString("/"); - if (!keyPath.isEmpty()) { - keyPath = keyPath + "/"; - } - this.keyPath = keyPath; + this.keyPath = path.buildAsString(); } @Override diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStore.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStore.java index 650d71f62ad..991e5f9707f 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStore.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStore.java @@ -145,11 +145,7 @@ public class S3BlobStore extends AbstractComponent implements BlobStore { if (prevListing != null) { list = client.listNextBatchOfObjects(prevListing); } else { - String keyPath = path.buildAsString("/"); - if (!keyPath.isEmpty()) { - keyPath = keyPath + "/"; - } - list = client.listObjects(bucket, keyPath); + list = client.listObjects(bucket, path.buildAsString()); multiObjectDeleteRequest = new DeleteObjectsRequest(list.getBucketName()); } for (S3ObjectSummary summary : list.getObjectSummaries()) { diff --git a/qa/backwards-5.0/build.gradle b/qa/backwards-5.0/build.gradle index 93d361c989c..70889ad5009 100644 --- a/qa/backwards-5.0/build.gradle +++ b/qa/backwards-5.0/build.gradle @@ -18,6 +18,6 @@ integTest { cluster { numNodes = 2 numBwcNodes = 1 - bwcVersion = "5.0.0-SNAPSHOT" // this is the same as the current version until we released the first RC + bwcVersion = "5.0.0-alpha3-SNAPSHOT" // this is the same as the current version until we release the first RC } } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java new file mode 100644 index 00000000000..8bd2451da57 --- /dev/null +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilElasticsearchCliTests.java @@ -0,0 +1,62 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership.
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.bootstrap; + +import org.elasticsearch.cli.ExitCodes; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; + +public class EvilElasticsearchCliTests extends ESElasticsearchCliTestCase { + + @SuppressForbidden(reason = "manipulates system properties for testing") + public void testPathHome() throws Exception { + final String pathHome = System.getProperty("es.path.home"); + final String value = randomAsciiOfLength(16); + System.setProperty("es.path.home", value); + + runTest( + ExitCodes.OK, + true, + output -> {}, + (foreground, pidFile, esSettings) -> { + assertThat(esSettings.size(), equalTo(1)); + assertThat(esSettings, hasEntry("path.home", value)); + }); + + System.clearProperty("es.path.home"); + final String commandLineValue = randomAsciiOfLength(16); + runTest( + ExitCodes.OK, + true, + output -> {}, + (foreground, pidFile, esSettings) -> { + assertThat(esSettings.size(), equalTo(1)); + assertThat(esSettings, hasEntry("path.home", commandLineValue)); + }, + "-Epath.home=" + commandLineValue); + + if (pathHome != null) System.setProperty("es.path.home", pathHome); + else System.clearProperty("es.path.home"); + } + +} diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java index af36d96f442..22b2ef39a88 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java @@ -27,6 +27,7 @@ import org.apache.lucene.util.SuppressForbidden; import org.elasticsearch.Version; import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.cli.UserError; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.io.PathUtilsForTesting; import org.elasticsearch.common.settings.Settings; @@ -54,8 +55,10 @@ import java.nio.file.attribute.PosixFileAttributeView; import java.nio.file.attribute.PosixFileAttributes; import java.nio.file.attribute.PosixFilePermission; import java.util.ArrayList; +import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.function.Function; import java.util.function.Supplier; @@ -129,7 +132,7 @@ public class InstallPluginCommandTests extends ESTestCase { } /** Creates a test environment with bin, config and plugins directories. 
*/ - static Environment createEnv(FileSystem fs, Function<String, Path> temp) throws IOException { + static Tuple<Path, Environment> createEnv(FileSystem fs, Function<String, Path> temp) throws IOException { Path home = temp.apply("install-plugin-command-tests"); Files.createDirectories(home.resolve("bin")); Files.createFile(home.resolve("bin").resolve("elasticsearch")); @@ -140,7 +143,7 @@ public class InstallPluginCommandTests extends ESTestCase { Settings settings = Settings.builder() .put("path.home", home) .build(); - return new Environment(settings); + return Tuple.tuple(home, new Environment(settings)); } static Path createPluginDir(Function<String, Path> temp) throws IOException { @@ -185,20 +188,22 @@ public class InstallPluginCommandTests extends ESTestCase { return writeZip(structure, "elasticsearch"); } - static MockTerminal installPlugin(String pluginUrl, Environment env) throws Exception { - return installPlugin(pluginUrl, env, false); + static MockTerminal installPlugin(String pluginUrl, Path home) throws Exception { + return installPlugin(pluginUrl, home, false); } - static MockTerminal installPlugin(String pluginUrl, Environment env, boolean jarHellCheck) throws Exception { + static MockTerminal installPlugin(String pluginUrl, Path home, boolean jarHellCheck) throws Exception { + Map<String, String> settings = new HashMap<>(); + settings.put("path.home", home.toString()); MockTerminal terminal = new MockTerminal(); - new InstallPluginCommand(env) { + new InstallPluginCommand() { @Override void jarHellCheck(Path candidate, Path pluginsDir) throws Exception { if (jarHellCheck) { super.jarHellCheck(candidate, pluginsDir); } } - }.execute(terminal, pluginUrl, true); + }.execute(terminal, pluginUrl, true, settings); return terminal; } @@ -275,192 +280,176 @@ public class InstallPluginCommandTests extends ESTestCase { } public void testSomethingWorks() throws Exception { - Environment env = createEnv(fs, temp); + Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("fake", pluginDir); - installPlugin(pluginZip, env); - assertPlugin("fake", pluginDir, env); + installPlugin(pluginZip, env.v1()); + assertPlugin("fake", pluginDir, env.v2()); } public void testSpaceInUrl() throws Exception { - Environment env = createEnv(fs, temp); + Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("fake", pluginDir); Path pluginZipWithSpaces = createTempFile("foo bar", ".zip"); try (InputStream in = new URL(pluginZip).openStream()) { Files.copy(in, pluginZipWithSpaces, StandardCopyOption.REPLACE_EXISTING); } - installPlugin(pluginZipWithSpaces.toUri().toURL().toString(), env); - assertPlugin("fake", pluginDir, env); + installPlugin(pluginZipWithSpaces.toUri().toURL().toString(), env.v1()); + assertPlugin("fake", pluginDir, env.v2()); } public void testMalformedUrlNotMaven() throws Exception { - Environment env = createEnv(fs, temp); + Tuple<Path, Environment> env = createEnv(fs, temp); // has two colons, so it appears similar to maven coordinates - MalformedURLException e = expectThrows(MalformedURLException.class, () -> { - installPlugin("://host:1234", env); - }); + MalformedURLException e = expectThrows(MalformedURLException.class, () -> installPlugin("://host:1234", env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("no protocol")); } public void testPluginsDirMissing() throws Exception { - Environment env = createEnv(fs, temp); - Files.delete(env.pluginsFile()); + Tuple<Path, Environment> env = createEnv(fs, temp); + Files.delete(env.v2().pluginsFile()); Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("fake", pluginDir); - installPlugin(pluginZip, env); - assertPlugin("fake", pluginDir, env); + installPlugin(pluginZip, env.v1()); + assertPlugin("fake", pluginDir, env.v2()); } public void testPluginsDirReadOnly() throws Exception { assumeTrue("posix and filesystem", isPosix && isReal); - Environment env = createEnv(fs, temp); + Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); - try (PosixPermissionsResetter pluginsAttrs = new PosixPermissionsResetter(env.pluginsFile())) { + try (PosixPermissionsResetter pluginsAttrs = new PosixPermissionsResetter(env.v2().pluginsFile())) { pluginsAttrs.setPermissions(new HashSet<>()); String pluginZip = createPlugin("fake", pluginDir); - IOException e = expectThrows(IOException.class, () -> { - installPlugin(pluginZip, env); - }); - assertTrue(e.getMessage(), e.getMessage().contains(env.pluginsFile().toString())); + IOException e = expectThrows(IOException.class, () -> installPlugin(pluginZip, env.v1())); + assertTrue(e.getMessage(), e.getMessage().contains(env.v2().pluginsFile().toString())); } - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testBuiltinModule() throws Exception { - Environment env = createEnv(fs, temp); + Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("lang-groovy", pluginDir); - UserError e = expectThrows(UserError.class, () -> { - installPlugin(pluginZip, env); - }); + UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("is a system module")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testJarHell() throws Exception { // jar hell test needs a real filesystem assumeTrue("real filesystem", isReal); - Environment environment = createEnv(fs, temp); + Tuple<Path, Environment> environment = createEnv(fs, temp); Path pluginDirectory = createPluginDir(temp); writeJar(pluginDirectory.resolve("other.jar"), "FakePlugin"); String pluginZip = createPlugin("fake", pluginDirectory); // adds plugin.jar with FakePlugin - IllegalStateException e = expectThrows(IllegalStateException.class, () -> { - installPlugin(pluginZip, environment, true); - }); + IllegalStateException e = expectThrows(IllegalStateException.class, () -> installPlugin(pluginZip, environment.v1(), true)); assertTrue(e.getMessage(), e.getMessage().contains("jar hell")); - assertInstallCleaned(environment); + assertInstallCleaned(environment.v2()); } public void testIsolatedPlugins() throws Exception { - Environment env = createEnv(fs, temp); + Tuple<Path, Environment> env = createEnv(fs, temp); // these both share the same FakePlugin class Path pluginDir1 = createPluginDir(temp); String pluginZip1 = createPlugin("fake1", pluginDir1); - installPlugin(pluginZip1, env); + installPlugin(pluginZip1, env.v1()); Path pluginDir2 = createPluginDir(temp); String pluginZip2 = createPlugin("fake2", pluginDir2); - installPlugin(pluginZip2, env); - assertPlugin("fake1", pluginDir1, env); - assertPlugin("fake2", pluginDir2, env); + installPlugin(pluginZip2, env.v1()); + assertPlugin("fake1", pluginDir1, env.v2()); + assertPlugin("fake2", pluginDir2, env.v2()); } public void testExistingPlugin() throws Exception { - Environment env = createEnv(fs, temp); + Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("fake", pluginDir); - installPlugin(pluginZip, env); - UserError e = expectThrows(UserError.class, () -> { - installPlugin(pluginZip, env); - }); + installPlugin(pluginZip, env.v1()); + UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("already exists")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testBin() throws Exception { - Environment env = createEnv(fs, temp); + Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path binDir = pluginDir.resolve("bin"); Files.createDirectory(binDir); Files.createFile(binDir.resolve("somescript")); String pluginZip = createPlugin("fake", pluginDir); - installPlugin(pluginZip, env); - assertPlugin("fake", pluginDir, env); + installPlugin(pluginZip, env.v1()); + assertPlugin("fake", pluginDir, env.v2()); } public void testBinNotDir() throws Exception { - Environment env = createEnv(fs, temp); + Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path binDir = pluginDir.resolve("bin"); Files.createFile(binDir); String pluginZip = createPlugin("fake", pluginDir); - UserError e = expectThrows(UserError.class, () -> { - installPlugin(pluginZip, env); - }); + UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testBinContainsDir() throws Exception { - Environment env = createEnv(fs, temp); + Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path dirInBinDir = pluginDir.resolve("bin").resolve("foo"); Files.createDirectories(dirInBinDir); Files.createFile(dirInBinDir.resolve("somescript")); String pluginZip = createPlugin("fake", pluginDir); - UserError e = expectThrows(UserError.class, () -> { - installPlugin(pluginZip, env); - }); + UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("Directories not allowed in bin dir for plugin")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testBinConflict() throws Exception { - Environment env = createEnv(fs, temp); + Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path binDir = pluginDir.resolve("bin"); Files.createDirectory(binDir); Files.createFile(binDir.resolve("somescript")); String pluginZip = createPlugin("elasticsearch", pluginDir); - FileAlreadyExistsException e = expectThrows(FileAlreadyExistsException.class, () -> { - installPlugin(pluginZip, env); - }); - assertTrue(e.getMessage(), e.getMessage().contains(env.binFile().resolve("elasticsearch").toString())); - assertInstallCleaned(env); + FileAlreadyExistsException e = expectThrows(FileAlreadyExistsException.class, () -> installPlugin(pluginZip, env.v1())); + assertTrue(e.getMessage(), e.getMessage().contains(env.v2().binFile().resolve("elasticsearch").toString())); + assertInstallCleaned(env.v2()); } public void testBinPermissions() throws Exception { assumeTrue("posix filesystem", isPosix); - Environment env = createEnv(fs, temp); + Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path binDir = pluginDir.resolve("bin"); Files.createDirectory(binDir); Files.createFile(binDir.resolve("somescript")); String pluginZip = createPlugin("fake", pluginDir); - try (PosixPermissionsResetter binAttrs = new PosixPermissionsResetter(env.binFile())) { + try (PosixPermissionsResetter binAttrs = new PosixPermissionsResetter(env.v2().binFile())) { Set<PosixFilePermission> perms = binAttrs.getCopyPermissions(); // make sure at least one execute perm is missing, so we know we forced it during installation perms.remove(PosixFilePermission.GROUP_EXECUTE); binAttrs.setPermissions(perms); - installPlugin(pluginZip, env); - assertPlugin("fake", pluginDir, env); + installPlugin(pluginZip, env.v1()); + assertPlugin("fake", pluginDir, env.v2()); } } public void testConfig() throws Exception { - Environment env = createEnv(fs, temp); + Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path configDir = pluginDir.resolve("config"); Files.createDirectory(configDir); Files.createFile(configDir.resolve("custom.yaml")); String pluginZip = createPlugin("fake", pluginDir); - installPlugin(pluginZip, env); - assertPlugin("fake", pluginDir, env); + installPlugin(pluginZip, env.v1()); + assertPlugin("fake", pluginDir, env.v2()); } public void testExistingConfig() throws Exception { - Environment env = createEnv(fs, temp); - Path envConfigDir = env.configFile().resolve("fake"); + Tuple<Path, Environment> env = createEnv(fs, temp); + Path envConfigDir = env.v2().configFile().resolve("fake"); Files.createDirectories(envConfigDir); Files.write(envConfigDir.resolve("custom.yaml"), "existing config".getBytes(StandardCharsets.UTF_8)); Path pluginDir = createPluginDir(temp); @@ -469,8 +458,8 @@ public class InstallPluginCommandTests extends ESTestCase { Files.write(configDir.resolve("custom.yaml"), "new config".getBytes(StandardCharsets.UTF_8)); Files.createFile(configDir.resolve("other.yaml")); String pluginZip = createPlugin("fake", pluginDir); - installPlugin(pluginZip, env); - assertPlugin("fake", pluginDir, env); + installPlugin(pluginZip, env.v1()); + assertPlugin("fake", pluginDir, env.v2()); List<String> configLines = Files.readAllLines(envConfigDir.resolve("custom.yaml"), StandardCharsets.UTF_8); assertEquals(1, configLines.size()); assertEquals("existing config", configLines.get(0)); @@ -478,80 +467,68 @@ public class InstallPluginCommandTests extends ESTestCase { } public void testConfigNotDir() throws Exception { - Environment env = createEnv(fs, temp); + Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path configDir = pluginDir.resolve("config"); Files.createFile(configDir); String pluginZip = createPlugin("fake", pluginDir); - UserError e = expectThrows(UserError.class, () -> { - installPlugin(pluginZip, env); - }); + UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testConfigContainsDir() throws Exception { - Environment env = createEnv(fs, temp); + Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path dirInConfigDir = pluginDir.resolve("config").resolve("foo"); Files.createDirectories(dirInConfigDir); Files.createFile(dirInConfigDir.resolve("myconfig.yml")); String pluginZip = createPlugin("fake", pluginDir); - UserError e = expectThrows(UserError.class, () -> { - installPlugin(pluginZip, env); - }); + UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("Directories not allowed in config dir for plugin")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testConfigConflict() throws Exception { - Environment env = createEnv(fs, temp); + Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path configDir = pluginDir.resolve("config"); Files.createDirectory(configDir); Files.createFile(configDir.resolve("myconfig.yml")); String pluginZip = createPlugin("elasticsearch.yml", pluginDir); - FileAlreadyExistsException e = expectThrows(FileAlreadyExistsException.class, () -> { - installPlugin(pluginZip, env); - }); - assertTrue(e.getMessage(), e.getMessage().contains(env.configFile().resolve("elasticsearch.yml").toString())); - assertInstallCleaned(env); + FileAlreadyExistsException e = expectThrows(FileAlreadyExistsException.class, () -> installPlugin(pluginZip, env.v1())); + assertTrue(e.getMessage(), e.getMessage().contains(env.v2().configFile().resolve("elasticsearch.yml").toString())); + assertInstallCleaned(env.v2()); } public void testMissingDescriptor() throws Exception { - Environment env = createEnv(fs, temp); + Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Files.createFile(pluginDir.resolve("fake.yml")); String pluginZip = writeZip(pluginDir, "elasticsearch"); - NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> { - installPlugin(pluginZip, env); - }); + NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("plugin-descriptor.properties")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testMissingDirectory() throws Exception { - Environment env = createEnv(fs, temp); + Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Files.createFile(pluginDir.resolve(PluginInfo.ES_PLUGIN_PROPERTIES)); String pluginZip = writeZip(pluginDir, null); - UserError e = expectThrows(UserError.class, () -> { - installPlugin(pluginZip, env); - }); + UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("`elasticsearch` directory is missing in the plugin zip")); - assertInstallCleaned(env); + assertInstallCleaned(env.v2()); } public void testZipRelativeOutsideEntryName() throws Exception { - Environment env = createEnv(fs, temp); + Tuple<Path, Environment> env = createEnv(fs, temp); Path zip = createTempDir().resolve("broken.zip"); try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(zip))) { stream.putNextEntry(new ZipEntry("elasticsearch/../blah")); } String pluginZip = zip.toUri().toURL().toString(); - IOException e = expectThrows(IOException.class, () -> { - installPlugin(pluginZip, env); - }); + IOException e = expectThrows(IOException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("resolving outside of plugin directory")); } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java index f26857e19af..1422280165c 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/ListPluginsCommandTests.java @@ -25,35 +25,47 @@ import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.nio.file.Path; import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; import java.util.stream.Collectors; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.cli.MockTerminal; +import
org.elasticsearch.common.inject.spi.HasDependencies; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.Version; +import org.junit.Before; @LuceneTestCase.SuppressFileSystems("*") public class ListPluginsCommandTests extends ESTestCase { - Environment createEnv() throws IOException { - Path home = createTempDir(); + private Path home; + private Environment env; + + @Before + public void setUp() throws Exception { + super.setUp(); + home = createTempDir(); Files.createDirectories(home.resolve("plugins")); Settings settings = Settings.builder() - .put("path.home", home) - .build(); - return new Environment(settings); + .put("path.home", home) + .build(); + env = new Environment(settings); } - static MockTerminal listPlugins(Environment env) throws Exception { - return listPlugins(env, new String[0]); + static MockTerminal listPlugins(Path home) throws Exception { + return listPlugins(home, new String[0]); } - static MockTerminal listPlugins(Environment env, String[] args) throws Exception { + static MockTerminal listPlugins(Path home, String[] args) throws Exception { + String[] argsAndHome = new String[args.length + 1]; + System.arraycopy(args, 0, argsAndHome, 0, args.length); + argsAndHome[args.length] = "-Epath.home=" + home; MockTerminal terminal = new MockTerminal(); - int status = new ListPluginsCommand(env).main(args, terminal); + int status = new ListPluginsCommand().main(argsAndHome, terminal); assertEquals(ExitCodes.OK, status); return terminal; } @@ -74,49 +86,42 @@ public class ListPluginsCommandTests extends ESTestCase { public void testPluginsDirMissing() throws Exception { - Environment env = createEnv(); Files.delete(env.pluginsFile()); - IOException e = expectThrows(IOException.class, () -> { - listPlugins(env); - }); + IOException e = expectThrows(IOException.class, () -> listPlugins(home)); assertEquals(e.getMessage(), "Plugins directory missing: " + env.pluginsFile()); } public void testNoPlugins() throws Exception { - MockTerminal terminal = listPlugins(createEnv()); + MockTerminal terminal = listPlugins(home); assertTrue(terminal.getOutput(), terminal.getOutput().isEmpty()); } public void testOnePlugin() throws Exception { - Environment env = createEnv(); buildFakePlugin(env, "fake desc", "fake", "org.fake"); - MockTerminal terminal = listPlugins(env); + MockTerminal terminal = listPlugins(home); assertEquals(terminal.getOutput(), buildMultiline("fake")); } public void testTwoPlugins() throws Exception { - Environment env = createEnv(); buildFakePlugin(env, "fake desc", "fake1", "org.fake"); buildFakePlugin(env, "fake desc 2", "fake2", "org.fake"); - MockTerminal terminal = listPlugins(env); + MockTerminal terminal = listPlugins(home); assertEquals(terminal.getOutput(), buildMultiline("fake1", "fake2")); } public void testPluginWithVerbose() throws Exception { - Environment env = createEnv(); buildFakePlugin(env, "fake desc", "fake_plugin", "org.fake"); String[] params = { "-v" }; - MockTerminal terminal = listPlugins(env, params); + MockTerminal terminal = listPlugins(home, params); assertEquals(terminal.getOutput(), buildMultiline("Plugins directory: " + env.pluginsFile(), "fake_plugin", "- Plugin information:", "Name: fake_plugin", "Description: fake desc", "Version: 1.0", " * Classname: org.fake")); } public void testPluginWithVerboseMultiplePlugins() throws Exception { - Environment env = createEnv(); buildFakePlugin(env, "fake desc 1", "fake_plugin1", 
"org.fake"); buildFakePlugin(env, "fake desc 2", "fake_plugin2", "org.fake2"); String[] params = { "-v" }; - MockTerminal terminal = listPlugins(env, params); + MockTerminal terminal = listPlugins(home, params); assertEquals(terminal.getOutput(), buildMultiline("Plugins directory: " + env.pluginsFile(), "fake_plugin1", "- Plugin information:", "Name: fake_plugin1", "Description: fake desc 1", "Version: 1.0", " * Classname: org.fake", "fake_plugin2", "- Plugin information:", "Name: fake_plugin2", @@ -124,26 +129,23 @@ public class ListPluginsCommandTests extends ESTestCase { } public void testPluginWithoutVerboseMultiplePlugins() throws Exception { - Environment env = createEnv(); buildFakePlugin(env, "fake desc 1", "fake_plugin1", "org.fake"); buildFakePlugin(env, "fake desc 2", "fake_plugin2", "org.fake2"); - MockTerminal terminal = listPlugins(env, new String[0]); + MockTerminal terminal = listPlugins(home, new String[0]); String output = terminal.getOutput(); assertEquals(output, buildMultiline("fake_plugin1", "fake_plugin2")); } public void testPluginWithoutDescriptorFile() throws Exception{ - Environment env = createEnv(); Files.createDirectories(env.pluginsFile().resolve("fake1")); - NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> listPlugins(env)); + NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> listPlugins(home)); assertEquals(e.getFile(), env.pluginsFile().resolve("fake1").resolve(PluginInfo.ES_PLUGIN_PROPERTIES).toString()); } public void testPluginWithWrongDescriptorFile() throws Exception{ - Environment env = createEnv(); PluginTestUtil.writeProperties(env.pluginsFile().resolve("fake1"), "description", "fake desc"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> listPlugins(env)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> listPlugins(home)); assertEquals(e.getMessage(), "Property [name] is missing in [" + env.pluginsFile().resolve("fake1").resolve(PluginInfo.ES_PLUGIN_PROPERTIES).toString() + "]"); } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java index d9d5661b834..6528bbc0911 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java @@ -23,6 +23,8 @@ import java.io.IOException; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; +import java.util.HashMap; +import java.util.Map; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.cli.UserError; @@ -30,25 +32,32 @@ import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; +import org.junit.Before; @LuceneTestCase.SuppressFileSystems("*") public class RemovePluginCommandTests extends ESTestCase { - /** Creates a test environment with bin, config and plugins directories. 
*/ - static Environment createEnv() throws IOException { - Path home = createTempDir(); + private Path home; + private Environment env; + + @Before + public void setUp() throws Exception { + super.setUp(); + home = createTempDir(); Files.createDirectories(home.resolve("bin")); Files.createFile(home.resolve("bin").resolve("elasticsearch")); Files.createDirectories(home.resolve("plugins")); Settings settings = Settings.builder() - .put("path.home", home) - .build(); - return new Environment(settings); + .put("path.home", home) + .build(); + env = new Environment(settings); } - static MockTerminal removePlugin(String name, Environment env) throws Exception { + static MockTerminal removePlugin(String name, Path home) throws Exception { + Map<String, String> settings = new HashMap<>(); + settings.put("path.home", home.toString()); MockTerminal terminal = new MockTerminal(); - new RemovePluginCommand(env).execute(terminal, name); + new RemovePluginCommand().execute(terminal, name, settings); return terminal; } @@ -63,33 +72,28 @@ public class RemovePluginCommandTests extends ESTestCase { } public void testMissing() throws Exception { - Environment env = createEnv(); - UserError e = expectThrows(UserError.class, () -> { - removePlugin("dne", env); - }); + UserError e = expectThrows(UserError.class, () -> removePlugin("dne", home)); assertTrue(e.getMessage(), e.getMessage().contains("Plugin dne not found")); assertRemoveCleaned(env); } public void testBasic() throws Exception { - Environment env = createEnv(); Files.createDirectory(env.pluginsFile().resolve("fake")); Files.createFile(env.pluginsFile().resolve("fake").resolve("plugin.jar")); Files.createDirectory(env.pluginsFile().resolve("fake").resolve("subdir")); Files.createDirectory(env.pluginsFile().resolve("other")); - removePlugin("fake", env); + removePlugin("fake", home); assertFalse(Files.exists(env.pluginsFile().resolve("fake"))); assertTrue(Files.exists(env.pluginsFile().resolve("other"))); assertRemoveCleaned(env); } public void testBin() throws Exception { - Environment env = createEnv(); Files.createDirectories(env.pluginsFile().resolve("fake")); Path binDir = env.binFile().resolve("fake"); Files.createDirectories(binDir); Files.createFile(binDir.resolve("somescript")); - removePlugin("fake", env); + removePlugin("fake", home); assertFalse(Files.exists(env.pluginsFile().resolve("fake"))); assertTrue(Files.exists(env.binFile().resolve("elasticsearch"))); assertFalse(Files.exists(binDir)); @@ -97,14 +101,12 @@ public class RemovePluginCommandTests extends ESTestCase { } public void testBinNotDir() throws Exception { - Environment env = createEnv(); Files.createDirectories(env.pluginsFile().resolve("elasticsearch")); - UserError e = expectThrows(UserError.class, () -> { - removePlugin("elasticsearch", env); - }); + UserError e = expectThrows(UserError.class, () -> removePlugin("elasticsearch", home)); assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); assertTrue(Files.exists(env.pluginsFile().resolve("elasticsearch"))); // did not remove assertTrue(Files.exists(env.binFile().resolve("elasticsearch"))); assertRemoveCleaned(env); } + } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java index 63c09890acc..f9cdf5b4f66 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java @@ -84,29 +84,10 @@ public class TribeUnitTests
extends ESTestCase { tribe2 = null; } - public void testThatTribeClientsIgnoreGlobalSysProps() throws Exception { - System.setProperty("es.cluster.name", "tribe_node_cluster"); - System.setProperty("es.tribe.t1.cluster.name", "tribe1"); - System.setProperty("es.tribe.t2.cluster.name", "tribe2"); - System.setProperty("es.tribe.t1.node_id.seed", Long.toString(random().nextLong())); - System.setProperty("es.tribe.t2.node_id.seed", Long.toString(random().nextLong())); - - try { - assertTribeNodeSuccessfullyCreated(Settings.EMPTY); - } finally { - System.clearProperty("es.cluster.name"); - System.clearProperty("es.tribe.t1.cluster.name"); - System.clearProperty("es.tribe.t2.cluster.name"); - System.clearProperty("es.tribe.t1.node_id.seed"); - System.clearProperty("es.tribe.t2.node_id.seed"); - } - } - public void testThatTribeClientsIgnoreGlobalConfig() throws Exception { Path pathConf = getDataPath("elasticsearch.yml").getParent(); Settings settings = Settings .builder() - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) .put(Environment.PATH_CONF_SETTING.getKey(), pathConf) .build(); assertTribeNodeSuccessfullyCreated(settings); diff --git a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java index 9976f072d42..6297ce244f9 100644 --- a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java +++ b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java @@ -75,7 +75,6 @@ public abstract class ESSmokeClientTestCase extends LuceneTestCase { private static Client startClient(Path tempDir, TransportAddress... transportAddresses) { Settings clientSettings = Settings.builder() .put("node.name", "qa_smoke_client_" + counter.getAndIncrement()) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) // prevents any settings to be replaced by system properties. .put("client.transport.ignore_cluster_name", true) .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) .put(Node.NODE_MODE_SETTING.getKey(), "network").build(); // we require network here! 
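The common thread in the test changes above is that configuration now reaches commands and nodes explicitly, either as a settings map handed to a command's execute(...) or as a -E command-line option, instead of through es.* system properties picked up by InternalSettingsPreparer. A minimal sketch of both call styles, not part of the commit itself; it assumes the test-framework classes shown above are on the classpath, and the home path, plugin name, and class name are placeholders:

    // Sketch only; mirrors the call patterns in RemovePluginCommandTests and
    // ListPluginsCommandTests above. "/tmp/es-home" and "fake" are placeholders.
    package org.elasticsearch.plugins;

    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.HashMap;
    import java.util.Map;

    import org.elasticsearch.cli.MockTerminal;

    public class ExplicitSettingsSketch {
        public static void main(String[] args) throws Exception {
            Path home = Paths.get("/tmp/es-home"); // placeholder home directory
            MockTerminal terminal = new MockTerminal();

            // Style 1: hand the settings map straight to the command.
            Map<String, String> settings = new HashMap<>();
            settings.put("path.home", home.toString());
            new RemovePluginCommand().execute(terminal, "fake", settings);

            // Style 2: pass the same setting as a -E command-line option.
            new ListPluginsCommand().main(new String[] { "-Epath.home=" + home }, terminal);
        }
    }

Either way, nothing is read from es.* system properties; the setting travels with the call.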
diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/AbstractMustacheTestCase.java b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/AbstractMustacheTestCase.java index f58351b2e7d..8a24ac2408f 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/AbstractMustacheTestCase.java +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/AbstractMustacheTestCase.java @@ -24,7 +24,6 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.ingest.core.TemplateService; import org.elasticsearch.script.ScriptContextRegistry; import org.elasticsearch.script.ScriptEngineRegistry; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptSettings; import org.elasticsearch.script.mustache.MustacheScriptEngineService; @@ -48,7 +47,7 @@ public abstract class AbstractMustacheTestCase extends ESTestCase { new ScriptEngineRegistry(Collections.singletonList( new ScriptEngineRegistry.ScriptEngineRegistration(MustacheScriptEngineService.class, MustacheScriptEngineService.NAME, - ScriptMode.ON))); + true))); ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList()); ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry); ScriptService scriptService = new ScriptService(settings, new Environment(settings), Collections.singleton(mustache), null, diff --git a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash index 07fea76bd8b..e20959a82e1 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash @@ -208,10 +208,6 @@ fi install_and_check_plugin discovery gce google-api-client-*.jar } -@test "[$GROUP] install delete by query plugin" { - install_and_check_plugin - delete-by-query -} - @test "[$GROUP] install discovery-azure plugin" { install_and_check_plugin discovery azure azure-core-*.jar } @@ -221,10 +217,10 @@ fi } @test "[$GROUP] install ingest-attachment plugin" { - # we specify the version on the poi-3.13.jar so that the test does + # we specify the version on the poi-3.15-beta1.jar so that the test does # not spuriously pass if the jar is missing but the other poi jars # are present - install_and_check_plugin ingest attachment bcprov-jdk15on-*.jar tika-core-*.jar pdfbox-*.jar poi-3.13.jar + install_and_check_plugin ingest attachment bcprov-jdk15on-*.jar tika-core-*.jar pdfbox-*.jar poi-3.15-beta1.jar poi-ooxml-3.15-beta1.jar poi-ooxml-schemas-*.jar poi-scratchpad-*.jar } @test "[$GROUP] install ingest-geoip plugin" { @@ -347,10 +343,6 @@ fi remove_plugin discovery-gce } -@test "[$GROUP] remove delete by query plugin" { - remove_plugin delete-by-query -} - @test "[$GROUP] remove discovery-azure plugin" { remove_plugin discovery-azure } diff --git a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash index 5f50dfc2850..c4dc8c96f58 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash @@ -340,7 +340,7 @@ run_elasticsearch_service() { local CONF_DIR="" local 
ES_PATH_CONF="" else - local ES_PATH_CONF="-Ees.path.conf=$CONF_DIR" + local ES_PATH_CONF="-Epath.conf=$CONF_DIR" fi # we must capture the exit code to compare so we don't want to start as background process in case we expect something other than 0 local background="" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.reroute.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.reroute.json index 2ae42c089d3..8bb85ca087a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.reroute.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.reroute.json @@ -16,6 +16,10 @@ "type" : "boolean", "description" : "Return an explanation of why the commands can or cannot be executed" }, + "retry_failed": { + "type" : "boolean", + "description" : "Retries allocation of shards that are blocked due to too many subsequent allocation failures" + }, "metric": { "type": "list", "options": ["_all", "blocks", "metadata", "nodes", "routing_table", "master_node", "version"], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json index 981aea79a1c..d1f10dbfbfc 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json @@ -1,6 +1,6 @@ { "delete_by_query": { - "documentation": "https://www.elastic.co/guide/en/elasticsearch/plugins/master/plugins-reindex.html", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-delete-by-query.html", "methods": ["POST"], "url": { "path": "/{index}/_delete_by_query", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.json b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.json index 0dc0088dd5c..80210a2048e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.json @@ -1,6 +1,6 @@ { "reindex": { - "documentation": "https://www.elastic.co/guide/en/elasticsearch/plugins/master/plugins-reindex.html", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-reindex.html", "methods": ["POST"], "url": { "path": "/_reindex", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.rethrottle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.rethrottle.json index 921249ab4cd..10d6321212d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.rethrottle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.rethrottle.json @@ -1,6 +1,6 @@ { "reindex.rethrottle": { - "documentation": "https://www.elastic.co/guide/en/elasticsearch/plugins/master/plugins-reindex.html", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-reindex.html", "methods": ["POST"], "url": { "path": "/_reindex/{task_id}/_rethrottle", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.cancel.json b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.cancel.json index 506828beaf7..69d21f4ec1d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.cancel.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.cancel.json @@ -1,6 +1,6 @@ { "tasks.cancel": { - "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/tasks-cancel.html", + "documentation": 
"http://www.elastic.co/guide/en/elasticsearch/reference/master/tasks.html", "methods": ["POST"], "url": { "path": "/_tasks", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json index a2e2b5e916c..313dbe00c46 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json @@ -1,6 +1,6 @@ { "update_by_query": { - "documentation": "https://www.elastic.co/guide/en/elasticsearch/plugins/master/plugins-reindex.html", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-update-by-query.html", "methods": ["POST"], "url": { "path": "/{index}/_update_by_query", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml index a5a67d1a557..51f8fe9ed4c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml @@ -24,7 +24,7 @@ - match: $body: | /^(green \s+ - (open|close) \s+ + open \s+ index1 \s+ 1 \s+ 0 \s+ @@ -49,3 +49,24 @@ (\d\d\d\d\-\d\d\-\d\dT\d\d:\d\d:\d\d.\d\d\dZ) \s* ) $/ + - do: + indices.close: + index: index1 + + - do: + cat.indices: + index: index* + + - match: + $body: | + /^( \s+ + close \s+ + index1 \s+ + \s+ + \s+ + \s+ + \s+ + \s+ + \s* + ) + $/ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml index dfafd833509..97ffae1f802 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml @@ -48,7 +48,6 @@ merges.total_docs .+ \n merges.total_size .+ \n merges.total_time .+ \n - percolate.queries .+ \n refresh.total .+ \n refresh.time .+ \n search.fetch_current .+ \n diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml index 060752c1ab1..bc1b444ab54 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ingest/40_simulate.yaml @@ -341,7 +341,7 @@ ] } - length: { docs: 2 } - - match: { docs.0.error.type: "illegal_argument_exception" } + - match: { docs.0.error.type: "exception" } - match: { docs.1.doc._source.foo: "BAR" } - length: { docs.1.doc._ingest: 1 } - is_true: docs.1.doc._ingest.timestamp diff --git a/settings.gradle b/settings.gradle index 88217a9dde5..86673589e10 100644 --- a/settings.gradle +++ b/settings.gradle @@ -20,12 +20,12 @@ List projects = [ 'modules:lang-mustache', 'modules:lang-painless', 'modules:reindex', + 'modules:percolator', 'plugins:analysis-icu', 'plugins:analysis-kuromoji', 'plugins:analysis-phonetic', 'plugins:analysis-smartcn', 'plugins:analysis-stempel', - 'plugins:delete-by-query', 'plugins:discovery-azure', 'plugins:discovery-ec2', 'plugins:discovery-gce', diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java index 68eb0420b39..5b79721948e 100644 --- a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java +++ 
b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java @@ -20,6 +20,8 @@ package org.elasticsearch.bootstrap; import com.carrotsearch.randomizedtesting.RandomizedRunner; +import org.apache.log4j.Java9Hack; +import org.apache.lucene.util.Constants; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.SecureSM; import org.elasticsearch.common.Strings; @@ -89,6 +91,10 @@ public class BootstrapForTesting { throw new RuntimeException("found jar hell in test classpath", e); } + if (Constants.JRE_IS_MINIMUM_JAVA9) { + Java9Hack.fixLog4j(); + } + // install security manager if requested if (systemPropertyAsBoolean("tests.security.manager", true)) { try { diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java new file mode 100644 index 00000000000..aa327ae2546 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java @@ -0,0 +1,65 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.bootstrap; + +import org.elasticsearch.cli.MockTerminal; +import org.elasticsearch.test.ESTestCase; + +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; + +import static org.hamcrest.CoreMatchers.equalTo; + +abstract class ESElasticsearchCliTestCase extends ESTestCase { + + interface InitConsumer { + void accept(final boolean foreground, final String pidFile, final Map<String, String> esSettings); + } + + void runTest( + final int expectedStatus, + final boolean expectedInit, + final Consumer<String> outputConsumer, + final InitConsumer initConsumer, + String... args) throws Exception { + final MockTerminal terminal = new MockTerminal(); + try { + final AtomicBoolean init = new AtomicBoolean(); + final int status = Elasticsearch.main(args, new Elasticsearch() { + @Override + void init(final boolean daemonize, final String pidFile, final Map<String, String> esSettings) { + init.set(true); + initConsumer.accept(!daemonize, pidFile, esSettings); + } + }, terminal); + assertThat(status, equalTo(expectedStatus)); + assertThat(init.get(), equalTo(expectedInit)); + outputConsumer.accept(terminal.getOutput()); + } catch (Throwable t) { + // if an unexpected exception is thrown, we log + // terminal output to aid debugging + logger.info(terminal.getOutput()); + // rethrow so the test fails + throw t; + } + } + +} diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index 309b6622156..4e06bbe8b45 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -23,7 +23,6 @@ import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.ScriptMode; import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; @@ -70,7 +69,7 @@ public class MockScriptEngine implements ScriptEngineService { public void onModule(ScriptModule module) { module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, - MockScriptEngine.NAME, ScriptMode.ON)); + MockScriptEngine.NAME, true)); } } diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java similarity index 73% rename from core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java rename to test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index 9c47d701f57..c27617a7287 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -17,12 +17,23 @@ * under the License.
*/ -package org.elasticsearch.index.query; +package org.elasticsearch.test; import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.core.io.JsonStringEncoder; -import org.elasticsearch.script.ScriptMode; +import org.apache.lucene.util.IOUtils; +import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.node.internal.InternalSettingsPreparer; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsModule; +import org.elasticsearch.plugins.PluginsService; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.Query; @@ -76,7 +87,6 @@ import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.support.QueryParsers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.similarity.SimilarityService; @@ -97,11 +107,6 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptSettings; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.IndexSettingsModule; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.TestSearchContext; -import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPoolModule; import org.joda.time.DateTime; @@ -109,13 +114,14 @@ import org.joda.time.DateTimeZone; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; -import org.junit.BeforeClass; +import java.io.Closeable; import java.io.IOException; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -124,8 +130,6 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService; -import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; @@ -135,7 +139,7 @@ import static org.hamcrest.Matchers.not; public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>> extends ESTestCase { private static final GeohashGenerator geohashGenerator = new GeohashGenerator(); - protected static final String STRING_FIELD_NAME = "mapped_string"; + public static final String STRING_FIELD_NAME = "mapped_string"; protected static final String STRING_FIELD_NAME_2 = "mapped_string_2"; protected static final String INT_FIELD_NAME = "mapped_int"; protected static final String DOUBLE_FIELD_NAME = "mapped_double"; @@ -151,212 +155,42 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>> BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, GEO_POINT_FIELD_NAME}; private static final int NUMBER_OF_TESTQUERIES = 20; - private static Injector injector; - private static IndicesQueriesRegistry indicesQueriesRegistry; - private static IndexFieldDataService indexFieldDataService; + private static ServiceHolder serviceHolder; private static int queryNameId = 0; - private static SearchModule searchModule; - - protected static IndexFieldDataService indexFieldDataService() { - return indexFieldDataService; - } - - private static Index index; - - protected static Index getIndex() { - return index; - } - - private static Version indexVersionCreated; - - protected static Version getIndexVersionCreated() { - return indexVersionCreated; - } - - private static String[] currentTypes; - - protected static String[] getCurrentTypes() { - return currentTypes; - } - - protected static SearchModule getSearchModule() { - return searchModule; - } - - private static NamedWriteableRegistry namedWriteableRegistry; - private static String[] randomTypes; - private static ClientInvocationHandler clientInvocationHandler = new ClientInvocationHandler(); - private static IndexSettings idxSettings; - private static SimilarityService similarityService; - private static MapperService mapperService; - private static PercolatorQueryCache percolatorQueryCache; - private static BitsetFilterCache bitsetFilterCache; - private static ScriptService scriptService; - /** - * Setup for the whole base test class. - */ - @BeforeClass - public static void init() throws IOException { - // we have to prefer CURRENT since with the range of versions we support it's rather unlikely to get the current actually. - indexVersionCreated = randomBoolean() ?
Version.CURRENT - : VersionUtils.randomVersionBetween(random(), Version.V_2_0_0_beta1, Version.CURRENT); - Settings settings = Settings.builder() - .put("node.name", AbstractQueryTestCase.class.toString()) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false) - .build(); - Settings indexSettings = Settings.builder() - .put(ParseFieldMatcher.PARSE_STRICT, true) - .put(IndexMetaData.SETTING_VERSION_CREATED, indexVersionCreated).build(); - final ThreadPool threadPool = new ThreadPool(settings); - index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_"); - idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings); - ClusterService clusterService = createClusterService(threadPool); - setState(clusterService, new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder().put( - new IndexMetaData.Builder(index.getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0)))); + protected Index getIndex() { + return serviceHolder.index; + } - SettingsModule settingsModule = new SettingsModule(settings); - settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); - final Client proxy = (Client) Proxy.newProxyInstance( - Client.class.getClassLoader(), - new Class[]{Client.class}, - clientInvocationHandler); - namedWriteableRegistry = new NamedWriteableRegistry(); - ScriptModule scriptModule = new ScriptModule() { - @Override - protected void configure() { - Settings settings = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - // no file watching, so we don't need a ResourceWatcherService - .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false) - .build(); - MockScriptEngine mockScriptEngine = new MockScriptEngine(); - Multibinder multibinder = Multibinder.newSetBinder(binder(), ScriptEngineService.class); - multibinder.addBinding().toInstance(mockScriptEngine); - Set engines = new HashSet<>(); - engines.add(mockScriptEngine); - List customContexts = new ArrayList<>(); - ScriptEngineRegistry scriptEngineRegistry = - new ScriptEngineRegistry(Collections - .singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, - MockScriptEngine.NAME, ScriptMode.ON))); - bind(ScriptEngineRegistry.class).toInstance(scriptEngineRegistry); - ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(customContexts); - bind(ScriptContextRegistry.class).toInstance(scriptContextRegistry); - ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry); - bind(ScriptSettings.class).toInstance(scriptSettings); - try { - ScriptService scriptService = new ScriptService(settings, new Environment(settings), engines, null, - scriptEngineRegistry, scriptContextRegistry, scriptSettings); - bind(ScriptService.class).toInstance(scriptService); - } catch (IOException e) { - throw new IllegalStateException("error while binding ScriptService", e); - } - } - }; - scriptModule.prepareSettings(settingsModule); - searchModule = new SearchModule(settings, namedWriteableRegistry) { - @Override - protected void configureSearch() { - // Skip me - } - }; - injector = new ModulesBuilder().add( - new EnvironmentModule(new Environment(settings)), - settingsModule, - new ThreadPoolModule(threadPool), - new IndicesModule() { - @Override - public void configure() { - // skip services - bindMapperExtension(); - } - }, - scriptModule, - new IndexSettingsModule(index, indexSettings), - 
searchModule, - new AbstractModule() { - @Override - protected void configure() { - bind(Client.class).toInstance(proxy); - bind(ClusterService.class).toProvider(Providers.of(clusterService)); - bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class); - bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry); - } - } - ).createInjector(); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); - scriptService = injector.getInstance(ScriptService.class); - similarityService = new SimilarityService(idxSettings, Collections.emptyMap()); - MapperRegistry mapperRegistry = injector.getInstance(MapperRegistry.class); - mapperService = new MapperService(idxSettings, analysisService, similarityService, mapperRegistry, - () -> createShardContext()); - IndicesFieldDataCache indicesFieldDataCache = new IndicesFieldDataCache(settings, new IndexFieldDataCache.Listener() { - }); - indexFieldDataService = new IndexFieldDataService(idxSettings, indicesFieldDataCache, - injector.getInstance(CircuitBreakerService.class), mapperService); - bitsetFilterCache = new BitsetFilterCache(idxSettings, new BitsetFilterCache.Listener() { - @Override - public void onCache(ShardId shardId, Accountable accountable) { + protected Version getIndexVersionCreated() { + return serviceHolder.indexVersionCreated; + } - } + protected String[] getCurrentTypes() { + return serviceHolder.currentTypes; + } - @Override - public void onRemoval(ShardId shardId, Accountable accountable) { + protected Collection> getPlugins() { + return Collections.emptyList(); + } - } - }); - percolatorQueryCache = new PercolatorQueryCache(idxSettings, () -> createShardContext()); - indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class); - //create some random type with some default field, those types will stick around for all of the subclasses - currentTypes = new String[randomIntBetween(0, 5)]; - for (int i = 0; i < currentTypes.length; i++) { - String type = randomAsciiOfLengthBetween(1, 10); - mapperService.merge(type, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(type, - STRING_FIELD_NAME, "type=text", - STRING_FIELD_NAME_2, "type=keyword", - INT_FIELD_NAME, "type=integer", - DOUBLE_FIELD_NAME, "type=double", - BOOLEAN_FIELD_NAME, "type=boolean", - DATE_FIELD_NAME, "type=date", - OBJECT_FIELD_NAME, "type=object", - GEO_POINT_FIELD_NAME, GEO_POINT_FIELD_MAPPING, - GEO_SHAPE_FIELD_NAME, "type=geo_shape" - ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); - // also add mappings for two inner field in the object field - mapperService.merge(type, new CompressedXContent("{\"properties\":{\"" + OBJECT_FIELD_NAME + "\":{\"type\":\"object\"," - + "\"properties\":{\"" + DATE_FIELD_NAME + "\":{\"type\":\"date\"},\"" + INT_FIELD_NAME + "\":{\"type\":\"integer\"}}}}}"), - MapperService.MergeReason.MAPPING_UPDATE, false); - currentTypes[i] = type; - } - namedWriteableRegistry = injector.getInstance(NamedWriteableRegistry.class); + protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { } @AfterClass public static void afterClass() throws Exception { - injector.getInstance(ClusterService.class).close(); - terminate(injector.getInstance(ThreadPool.class)); - injector = null; - index = null; - currentTypes = null; - namedWriteableRegistry = null; - randomTypes = null; - indicesQueriesRegistry = null; - indexFieldDataService = null; - searchModule = null; - idxSettings = null; - similarityService = 
null; - mapperService = null; - percolatorQueryCache = null; - bitsetFilterCache = null; - scriptService = null; + IOUtils.close(serviceHolder); + serviceHolder = null; } @Before - public void beforeTest() { - clientInvocationHandler.delegate = this; + public void beforeTest() throws IOException { + if (serviceHolder == null) { + serviceHolder = new ServiceHolder(getPlugins(), this); + } + + serviceHolder.clientInvocationHandler.delegate = this; //set some random types to be queried as part the search request, before each test randomTypes = getRandomTypes(); } @@ -365,12 +199,12 @@ public abstract class AbstractQueryTestCase> TestSearchContext testSearchContext = new TestSearchContext(context) { @Override public MapperService mapperService() { - return mapperService; // need to build / parse inner hits sort fields + return serviceHolder.mapperService; // need to build / parse inner hits sort fields } @Override public IndexFieldDataService fieldData() { - return indexFieldDataService(); // need to build / parse inner hits sort fields + return serviceHolder.indexFieldDataService; // need to build / parse inner hits sort fields } }; testSearchContext.getQueryShardContext().setTypes(types); @@ -379,11 +213,11 @@ public abstract class AbstractQueryTestCase> @After public void afterTest() { - clientInvocationHandler.delegate = null; + serviceHolder.clientInvocationHandler.delegate = null; SearchContext.removeCurrent(); } - protected final QB createTestQueryBuilder() { + public final QB createTestQueryBuilder() { QB query = doCreateTestQueryBuilder(); //we should not set boost and query name for queries that don't parse it if (supportsBoostAndQueryName()) { @@ -688,7 +522,7 @@ public abstract class AbstractQueryTestCase> protected QB assertSerialization(QB testQuery) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { output.writeNamedWriteable(testQuery); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), serviceHolder.namedWriteableRegistry)) { QueryBuilder deserializedQuery = in.readNamedWriteable(QueryBuilder.class); assertEquals(testQuery, deserializedQuery); assertEquals(testQuery.hashCode(), deserializedQuery.hashCode()); @@ -737,7 +571,7 @@ public abstract class AbstractQueryTestCase> protected QB copyQuery(QB query) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { output.writeNamedWriteable(query); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), serviceHolder.namedWriteableRegistry)) { return (QB) in.readNamedWriteable(QueryBuilder.class); } } @@ -747,17 +581,14 @@ public abstract class AbstractQueryTestCase> * @return a new {@link QueryShardContext} based on the base test index and queryParserService */ protected static QueryShardContext createShardContext() { - ClusterState state = ClusterState.builder(new ClusterName("_name")).build(); - Client client = injector.getInstance(Client.class); - return new QueryShardContext(idxSettings, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, - scriptService, indicesQueriesRegistry, client, percolatorQueryCache, null, state); + return serviceHolder.createShardContext(); } /** * @return a new {@link QueryParseContext} based on the base test 
index and queryParserService */ protected static QueryParseContext createParseContext(XContentParser parser, ParseFieldMatcher matcher) { - QueryParseContext queryParseContext = new QueryParseContext(indicesQueriesRegistry, parser, matcher); + QueryParseContext queryParseContext = new QueryParseContext(serviceHolder.indicesQueriesRegistry, parser, matcher); return queryParseContext; } @@ -810,7 +641,7 @@ public abstract class AbstractQueryTestCase> */ protected String getRandomFieldName() { // if no type is set then return a random field name - if (currentTypes == null || currentTypes.length == 0 || randomBoolean()) { + if (serviceHolder.currentTypes == null || serviceHolder.currentTypes.length == 0 || randomBoolean()) { return randomAsciiOfLengthBetween(1, 10); } return randomFrom(MAPPED_LEAF_FIELD_NAMES); @@ -843,11 +674,11 @@ public abstract class AbstractQueryTestCase> protected String[] getRandomTypes() { String[] types; - if (currentTypes.length > 0 && randomBoolean()) { - int numberOfQueryTypes = randomIntBetween(1, currentTypes.length); + if (serviceHolder.currentTypes.length > 0 && randomBoolean()) { + int numberOfQueryTypes = randomIntBetween(1, serviceHolder.currentTypes.length); types = new String[numberOfQueryTypes]; for (int i = 0; i < numberOfQueryTypes; i++) { - types[i] = randomFrom(currentTypes); + types[i] = randomFrom(serviceHolder.currentTypes); } } else { if (randomBoolean()) { @@ -860,7 +691,7 @@ public abstract class AbstractQueryTestCase> } protected String getRandomType() { - return (currentTypes.length == 0) ? MetaData.ALL : randomFrom(currentTypes); + return (serviceHolder.currentTypes.length == 0) ? MetaData.ALL : randomFrom(serviceHolder.currentTypes); } public static String randomGeohash(int minPrecision, int maxPrecision) { @@ -903,12 +734,6 @@ public abstract class AbstractQueryTestCase> return randomFrom("1", "-1", "75%", "-25%", "2<75%", "2<-25%"); } - protected static String randomTimeZone() { - return randomFrom(TIMEZONE_IDS); - } - - private static final List TIMEZONE_IDS = new ArrayList<>(DateTimeZone.getAvailableIDs()); - private static class ClientInvocationHandler implements InvocationHandler { AbstractQueryTestCase delegate; @@ -1017,4 +842,185 @@ public abstract class AbstractQueryTestCase> return query; } + static class ServiceHolder implements Closeable { + + private final Injector injector; + private final IndicesQueriesRegistry indicesQueriesRegistry; + private final IndexFieldDataService indexFieldDataService; + private final SearchModule searchModule; + private final Index index; + private final Version indexVersionCreated; + private final String[] currentTypes; + private final NamedWriteableRegistry namedWriteableRegistry; + private final ClientInvocationHandler clientInvocationHandler = new ClientInvocationHandler(); + private final IndexSettings idxSettings; + private final SimilarityService similarityService; + private final MapperService mapperService; + private final BitsetFilterCache bitsetFilterCache; + private final ScriptService scriptService; + + public ServiceHolder(Collection> plugins, AbstractQueryTestCase testCase) throws IOException { + // we have to prefer CURRENT since with the range of versions we support it's rather unlikely to get the current actually. + indexVersionCreated = randomBoolean() ? 
Version.CURRENT
+                : VersionUtils.randomVersionBetween(random(), Version.V_2_0_0_beta1, Version.CURRENT);
+            Settings settings = Settings.builder()
+                .put("node.name", AbstractQueryTestCase.class.toString())
+                .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
+                .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false)
+                .build();
+            Settings indexSettings = Settings.builder()
+                .put(ParseFieldMatcher.PARSE_STRICT, true)
+                .put(IndexMetaData.SETTING_VERSION_CREATED, indexVersionCreated).build();
+            final ThreadPool threadPool = new ThreadPool(settings);
+            index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_");
+            ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool);
+            ClusterServiceUtils.setState(clusterService, new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder().put(
+                new IndexMetaData.Builder(index.getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0))));
+            Environment env = InternalSettingsPreparer.prepareEnvironment(settings, null);
+            PluginsService pluginsService = new PluginsService(settings, env.modulesFile(), env.pluginsFile(), plugins);
+
+            SettingsModule settingsModule = new SettingsModule(settings);
+            settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED);
+            final Client proxy = (Client) Proxy.newProxyInstance(
+                Client.class.getClassLoader(),
+                new Class[]{Client.class},
+                clientInvocationHandler);
+            NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry();
+            ScriptModule scriptModule = new ScriptModule() {
+                @Override
+                protected void configure() {
+                    Settings settings = Settings.builder()
+                        .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
+                        // no file watching, so we don't need a ResourceWatcherService
+                        .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false)
+                        .build();
+                    MockScriptEngine mockScriptEngine = new MockScriptEngine();
+                    Multibinder<ScriptEngineService> multibinder = Multibinder.newSetBinder(binder(), ScriptEngineService.class);
+                    multibinder.addBinding().toInstance(mockScriptEngine);
+                    Set<ScriptEngineService> engines = new HashSet<>();
+                    engines.add(mockScriptEngine);
+                    List<ScriptContext.Plugin> customContexts = new ArrayList<>();
+                    ScriptEngineRegistry scriptEngineRegistry =
+                        new ScriptEngineRegistry(Collections
+                            .singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class,
+                                MockScriptEngine.NAME, true)));
+                    bind(ScriptEngineRegistry.class).toInstance(scriptEngineRegistry);
+                    ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(customContexts);
+                    bind(ScriptContextRegistry.class).toInstance(scriptContextRegistry);
+                    ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
+                    bind(ScriptSettings.class).toInstance(scriptSettings);
+                    try {
+                        ScriptService scriptService = new ScriptService(settings, new Environment(settings), engines, null,
+                            scriptEngineRegistry, scriptContextRegistry, scriptSettings);
+                        bind(ScriptService.class).toInstance(scriptService);
+                    } catch (IOException e) {
+                        throw new IllegalStateException("error while binding ScriptService", e);
+                    }
+                }
+            };
+            scriptModule.prepareSettings(settingsModule);
+            searchModule = new SearchModule(settings, namedWriteableRegistry) {
+                @Override
+                protected void configureSearch() {
+                    // Skip me
+                }
+            };
+            ModulesBuilder modulesBuilder = new ModulesBuilder();
+            for (Module pluginModule : pluginsService.nodeModules()) {
+                modulesBuilder.add(pluginModule);
+            }
+            modulesBuilder.add(new PluginsModule(pluginsService));
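With plugin modules collected from a real PluginsService as above, a query test no longer has to lean on the shared static setup: it can ship its own plugin and extra mappings through the new getPlugins() and initializeAdditionalMappings() hooks. A minimal sketch of a subclass; MyQueryBuilder and MyPlugin are hypothetical names, the getPlugins() generics are assumed to be Collection<Class<? extends Plugin>>, and the other abstract test hooks are omitted:

    import java.io.IOException;
    import java.util.Collection;
    import java.util.Collections;

    import org.elasticsearch.index.mapper.MapperService;
    import org.elasticsearch.plugins.Plugin;
    import org.elasticsearch.test.AbstractQueryTestCase;

    public class MyQueryBuilderTests extends AbstractQueryTestCase<MyQueryBuilder> {
        @Override
        protected Collection<Class<? extends Plugin>> getPlugins() {
            // handed to the PluginsService built in the ServiceHolder constructor above
            return Collections.singletonList(MyPlugin.class);
        }

        @Override
        protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
            // merge whatever extra mappings the queries under test depend on
        }

        @Override
        protected MyQueryBuilder doCreateTestQueryBuilder() {
            return new MyQueryBuilder(randomAsciiOfLengthBetween(1, 10));
        }
    }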
+            modulesBuilder.add(
+                new EnvironmentModule(new Environment(settings)),
+                settingsModule,
+                new ThreadPoolModule(threadPool),
+                new IndicesModule() {
+                    @Override
+                    public void configure() {
+                        // skip services
+                        bindMapperExtension();
+                    }
+                },
+                scriptModule,
+                new IndexSettingsModule(index, indexSettings),
+                searchModule,
+                new AbstractModule() {
+                    @Override
+                    protected void configure() {
+                        bind(Client.class).toInstance(proxy);
+                        bind(ClusterService.class).toProvider(Providers.of(clusterService));
+                        bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
+                        bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry);
+                    }
+                }
+            );
+            pluginsService.processModules(modulesBuilder);
+            injector = modulesBuilder.createInjector();
+            IndexScopedSettings indexScopedSettings = injector.getInstance(IndexScopedSettings.class);
+            idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings, indexScopedSettings);
+            AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
+            scriptService = injector.getInstance(ScriptService.class);
+            similarityService = new SimilarityService(idxSettings, Collections.emptyMap());
+            MapperRegistry mapperRegistry = injector.getInstance(MapperRegistry.class);
+            mapperService = new MapperService(idxSettings, analysisService, similarityService, mapperRegistry,
+                () -> createShardContext());
+            IndicesFieldDataCache indicesFieldDataCache = new IndicesFieldDataCache(settings, new IndexFieldDataCache.Listener() {
+            });
+            indexFieldDataService = new IndexFieldDataService(idxSettings, indicesFieldDataCache,
+                injector.getInstance(CircuitBreakerService.class), mapperService);
+            bitsetFilterCache = new BitsetFilterCache(idxSettings, new BitsetFilterCache.Listener() {
+                @Override
+                public void onCache(ShardId shardId, Accountable accountable) {
+
+                }
+
+                @Override
+                public void onRemoval(ShardId shardId, Accountable accountable) {
+
+                }
+            });
+            indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class);
+            // create some random types with some default fields; those types will stick around for all of the subclasses
+            currentTypes = new String[randomIntBetween(0, 5)];
+            for (int i = 0; i < currentTypes.length; i++) {
+                String type = randomAsciiOfLengthBetween(1, 10);
+                mapperService.merge(type, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(type,
+                    STRING_FIELD_NAME, "type=text",
+                    STRING_FIELD_NAME_2, "type=keyword",
+                    INT_FIELD_NAME, "type=integer",
+                    DOUBLE_FIELD_NAME, "type=double",
+                    BOOLEAN_FIELD_NAME, "type=boolean",
+                    DATE_FIELD_NAME, "type=date",
+                    OBJECT_FIELD_NAME, "type=object",
+                    GEO_POINT_FIELD_NAME, GEO_POINT_FIELD_MAPPING,
+                    GEO_SHAPE_FIELD_NAME, "type=geo_shape"
+                ).string()), MapperService.MergeReason.MAPPING_UPDATE, false);
+                // also add mappings for two inner fields in the object field
+                mapperService.merge(type, new CompressedXContent("{\"properties\":{\"" + OBJECT_FIELD_NAME + "\":{\"type\":\"object\"," +
+                    "\"properties\":{\"" + DATE_FIELD_NAME + "\":{\"type\":\"date\"},\"" + INT_FIELD_NAME + "\":{\"type\":\"integer\"}}}}}"),
+                    MapperService.MergeReason.MAPPING_UPDATE, false);
+                currentTypes[i] = type;
+            }
+            testCase.initializeAdditionalMappings(mapperService);
+            this.namedWriteableRegistry = injector.getInstance(NamedWriteableRegistry.class);
+        }
+
+        public void close() throws IOException {
+            injector.getInstance(ClusterService.class).close();
+            try {
+                terminate(injector.getInstance(ThreadPool.class));
+            } catch (InterruptedException e) {
+
IOUtils.reThrow(e); + } + } + + QueryShardContext createShardContext() { + ClusterState state = ClusterState.builder(new ClusterName("_name")).build(); + Client client = injector.getInstance(Client.class); + return new QueryShardContext(idxSettings, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, + scriptService, indicesQueriesRegistry, client, null, state); + } + + } + } diff --git a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceUtils.java b/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java similarity index 98% rename from core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceUtils.java rename to test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java index e3f132b18fb..3e542231001 100644 --- a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.cluster.service; +package org.elasticsearch.test; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.NodeConnectionsService; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.DummyTransportAddress; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 247a340d8c4..8f62617ee7b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -19,7 +19,6 @@ package org.elasticsearch.test; import com.carrotsearch.randomizedtesting.RandomizedContext; -import com.carrotsearch.randomizedtesting.RandomizedTest; import com.carrotsearch.randomizedtesting.annotations.TestGroup; import com.carrotsearch.randomizedtesting.generators.RandomInts; import com.carrotsearch.randomizedtesting.generators.RandomPicks; @@ -124,7 +123,6 @@ import org.elasticsearch.test.store.MockFSIndexStore; import org.elasticsearch.test.transport.AssertingLocalTransport; import org.elasticsearch.test.transport.MockTransportService; import org.hamcrest.Matchers; -import org.joda.time.DateTimeZone; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -1826,23 +1824,6 @@ public abstract class ESIntegTestCase extends ESTestCase { return perTestRatio; } - /** - * Returns a random JODA Time Zone based on Java Time Zones - */ - public static DateTimeZone randomDateTimeZone() { - DateTimeZone timeZone; - - // It sounds like some Java Time Zones are unknown by JODA. 
For example: Asia/Riyadh88 - // We need to fallback in that case to a known time zone - try { - timeZone = DateTimeZone.forTimeZone(RandomizedTest.randomTimeZone()); - } catch (IllegalArgumentException e) { - timeZone = DateTimeZone.forOffsetHours(randomIntBetween(-12, 12)); - } - - return timeZone; - } - /** * Returns path to a random directory that can be used to create a temporary file system repo */ @@ -1968,7 +1949,7 @@ public abstract class ESIntegTestCase extends ESTestCase { assert INSTANCE == null; if (isSuiteScopedTest(targetClass)) { // note we need to do this this way to make sure this is reproducible - INSTANCE = (ESIntegTestCase) targetClass.newInstance(); + INSTANCE = (ESIntegTestCase) targetClass.getConstructor().newInstance(); boolean success = false; try { INSTANCE.printTestMessage("setup"); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index 1e75f3d8261..7875f8fd20b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -185,7 +185,6 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { .put("http.enabled", false) .put(Node.NODE_LOCAL_SETTING.getKey(), true) .put(Node.NODE_DATA_SETTING.getKey(), true) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) // make sure we get what we set :) .put(nodeSettings()) // allow test cases to provide their own settings or override these .build(); Node build = new MockNode(settings, getVersion(), getPlugins()); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 3dbbf25e202..6ed9a746f46 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -61,6 +61,7 @@ import org.elasticsearch.search.MockSearchService; import org.elasticsearch.test.junit.listeners.LoggingListener; import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter; import org.elasticsearch.threadpool.ThreadPool; +import org.joda.time.DateTimeZone; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -398,6 +399,15 @@ public abstract class ESTestCase extends LuceneTestCase { return randomTimeValue(1, 1000); } + /** + * generate a random DateTimeZone from the ones available in joda library + */ + public static DateTimeZone randomDateTimeZone() { + List ids = new ArrayList<>(DateTimeZone.getAvailableIDs()); + Collections.sort(ids); + return DateTimeZone.forID(randomFrom(ids)); + } + /** * helper to randomly perform on consumer with value */ @@ -433,24 +443,13 @@ public abstract class ESTestCase extends LuceneTestCase { * Runs the code block for 10 seconds waiting for no assertion to trip. */ public static void assertBusy(Runnable codeBlock) throws Exception { - assertBusy(Executors.callable(codeBlock), 10, TimeUnit.SECONDS); - } - - public static void assertBusy(Runnable codeBlock, long maxWaitTime, TimeUnit unit) throws Exception { - assertBusy(Executors.callable(codeBlock), maxWaitTime, unit); - } - - /** - * Runs the code block for 10 seconds waiting for no assertion to trip. 
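The Callable flavors of assertBusy collapse into the single Runnable variant whose replacement body follows: it retries with an exponentially growing sleep (starting at 1 ms) for roughly log2(maxWaitMillis) rounds, sleeps off whatever budget remains, makes one final attempt, and attaches every earlier AssertionError as suppressed to the one that finally escapes. Call sites stay one-liners; a usage sketch, where both checks are made-up stand-ins for any code that throws AssertionError until the cluster catches up:

    assertBusy(() -> assertTrue("index not created yet", indexExists("test")));   // default 10-second budget
    assertBusy(() -> assertEquals(0, pendingTaskCount()), 30, TimeUnit.SECONDS);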
- */ - public static V assertBusy(Callable codeBlock) throws Exception { - return assertBusy(codeBlock, 10, TimeUnit.SECONDS); + assertBusy(codeBlock, 10, TimeUnit.SECONDS); } /** * Runs the code block for the provided interval, waiting for no assertions to trip. */ - public static V assertBusy(Callable codeBlock, long maxWaitTime, TimeUnit unit) throws Exception { + public static void assertBusy(Runnable codeBlock, long maxWaitTime, TimeUnit unit) throws Exception { long maxTimeInMillis = TimeUnit.MILLISECONDS.convert(maxWaitTime, unit); long iterations = Math.max(Math.round(Math.log10(maxTimeInMillis) / Math.log10(2)), 1); long timeInMillis = 1; @@ -458,7 +457,8 @@ public abstract class ESTestCase extends LuceneTestCase { List failures = new ArrayList<>(); for (int i = 0; i < iterations; i++) { try { - return codeBlock.call(); + codeBlock.run(); + return; } catch (AssertionError e) { failures.add(e); } @@ -469,7 +469,7 @@ public abstract class ESTestCase extends LuceneTestCase { timeInMillis = maxTimeInMillis - sum; Thread.sleep(Math.max(timeInMillis, 0)); try { - return codeBlock.call(); + codeBlock.run(); } catch (AssertionError e) { for (AssertionError failure : failures) { e.addSuppressed(failure); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java index 4625aa77e25..c08c2793eaa 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java @@ -51,7 +51,6 @@ import java.util.concurrent.TimeUnit; final class ExternalNode implements Closeable { public static final Settings REQUIRED_SETTINGS = Settings.builder() - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "zen") .put(Node.NODE_MODE_SETTING.getKey(), "network").build(); // we need network mode for this @@ -100,8 +99,8 @@ final class ExternalNode implements Closeable { } else { params.add("bin/elasticsearch.bat"); } - params.add("-Des.cluster.name=" + clusterName); - params.add("-Des.node.name=" + nodeName); + params.add("-Ecluster.name=" + clusterName); + params.add("-Enode.name=" + nodeName); Settings.Builder externaNodeSettingsBuilder = Settings.builder(); for (Map.Entry entry : settings.getAsMap().entrySet()) { switch (entry.getKey()) { @@ -122,11 +121,11 @@ final class ExternalNode implements Closeable { } this.externalNodeSettings = externaNodeSettingsBuilder.put(REQUIRED_SETTINGS).build(); for (Map.Entry entry : externalNodeSettings.getAsMap().entrySet()) { - params.add("-Des." 
+ entry.getKey() + "=" + entry.getValue()); + params.add("-E" + entry.getKey() + "=" + entry.getValue()); } - params.add("-Des.path.home=" + PathUtils.get(".").toAbsolutePath()); - params.add("-Des.path.conf=" + path + "/config"); + params.add("-Epath.home=" + PathUtils.get(".").toAbsolutePath()); + params.add("-Epath.conf=" + path + "/config"); ProcessBuilder builder = new ProcessBuilder(params); builder.directory(path.toFile()); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java index 5372c319dae..71fe622d8c0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java @@ -73,7 +73,6 @@ public final class ExternalTestCluster extends TestCluster { Settings clientSettings = Settings.builder() .put(additionalSettings) .put("node.name", InternalTestCluster.TRANSPORT_CLIENT_PREFIX + EXTERNAL_CLUSTER_PREFIX + counter.getAndIncrement()) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) // prevents any settings to be replaced by system properties. .put("client.transport.ignore_cluster_name", true) .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) .put(Node.NODE_MODE_SETTING.getKey(), "network").build(); // we require network here! diff --git a/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java b/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java index ad365d42850..789eb693f7f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java +++ b/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java @@ -66,6 +66,16 @@ public class IndexSettingsModule extends AbstractModule { return new IndexSettings(metaData, Settings.EMPTY, (idx) -> Regex.simpleMatch(idx, metaData.getIndex().getName()), new IndexScopedSettings(Settings.EMPTY, settingSet)); } + public static IndexSettings newIndexSettings(Index index, Settings settings, IndexScopedSettings indexScopedSettings) { + Settings build = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(settings) + .build(); + IndexMetaData metaData = IndexMetaData.builder(index.getName()).settings(build).build(); + return new IndexSettings(metaData, Settings.EMPTY, (idx) -> Regex.simpleMatch(idx, metaData.getIndex().getName()), indexScopedSettings); + } + public static IndexSettings newIndexSettings(final IndexMetaData indexMetaData, Setting... 
setting) {
         Set<Setting<?>> settingSet = new HashSet<>(IndexScopedSettings.BUILT_IN_INDEX_SETTINGS);
         if (setting.length > 0) {
diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
index 3199a27b9a5..69f811b7d07 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
@@ -80,7 +80,6 @@
 import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
 import org.elasticsearch.indices.recovery.RecoverySettings;
 import org.elasticsearch.node.MockNode;
 import org.elasticsearch.node.Node;
-import org.elasticsearch.node.internal.InternalSettingsPreparer;
 import org.elasticsearch.node.service.NodeService;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.script.ScriptService;
@@ -291,11 +290,10 @@
         builder.put(Environment.PATH_REPO_SETTING.getKey(), baseDir.resolve("repos"));
         builder.put(TransportSettings.PORT.getKey(), TRANSPORT_BASE_PORT + "-" + (TRANSPORT_BASE_PORT + PORTS_PER_CLUSTER));
         builder.put("http.port", HTTP_BASE_PORT + "-" + (HTTP_BASE_PORT + PORTS_PER_CLUSTER));
-        builder.put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true);
         builder.put(Node.NODE_MODE_SETTING.getKey(), nodeMode);
         builder.put("http.pipelining", enableHttpPipelining);
-        if (Strings.hasLength(System.getProperty("es.logger.level"))) {
-            builder.put("logger.level", System.getProperty("es.logger.level"));
+        if (Strings.hasLength(System.getProperty("tests.es.logger.level"))) {
+            builder.put("logger.level", System.getProperty("tests.es.logger.level"));
         }
         if (Strings.hasLength(System.getProperty("es.logger.prefix"))) {
             builder.put("logger.prefix", System.getProperty("es.logger.prefix"));
@@ -319,14 +317,14 @@
     public static String configuredNodeMode() {
         Builder builder = Settings.builder();
-        if (Strings.isEmpty(System.getProperty("es.node.mode")) && Strings.isEmpty(System.getProperty("es.node.local"))) {
+        if (Strings.isEmpty(System.getProperty("tests.es.node.mode")) && Strings.isEmpty(System.getProperty("tests.es.node.local"))) {
             return "local"; // default if nothing is specified
         }
-        if (Strings.hasLength(System.getProperty("es.node.mode"))) {
-            builder.put(Node.NODE_MODE_SETTING.getKey(), System.getProperty("es.node.mode"));
+        if (Strings.hasLength(System.getProperty("tests.es.node.mode"))) {
+            builder.put(Node.NODE_MODE_SETTING.getKey(), System.getProperty("tests.es.node.mode"));
         }
-        if (Strings.hasLength(System.getProperty("es.node.local"))) {
-            builder.put(Node.NODE_LOCAL_SETTING.getKey(), System.getProperty("es.node.local"));
+        if (Strings.hasLength(System.getProperty("tests.es.node.local"))) {
+            builder.put(Node.NODE_LOCAL_SETTING.getKey(), System.getProperty("tests.es.node.local"));
        }
         if (DiscoveryNode.isLocalNode(builder.build())) {
             return "local";
@@ -882,7 +880,6 @@
                 .put(Node.NODE_MODE_SETTING.getKey(), Node.NODE_MODE_SETTING.exists(nodeSettings) ?
Node.NODE_MODE_SETTING.get(nodeSettings) : nodeMode) .put("logger.prefix", nodeSettings.get("logger.prefix", "")) .put("logger.level", nodeSettings.get("logger.level", "INFO")) - .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true) .put(settings); if (Node.NODE_LOCAL_SETTING.exists(nodeSettings)) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index 5cac904df35..d8ea1f60bbd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -38,7 +38,6 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.object.ObjectMapper; -import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.IndexShard; @@ -73,7 +72,6 @@ public class TestSearchContext extends SearchContext { final IndexService indexService; final IndexFieldDataService indexFieldDataService; final BitsetFilterCache fixedBitSetFilterCache; - final PercolatorQueryCache percolatorQueryCache; final ThreadPool threadPool; final Map, Collector> queryCollectors = new HashMap<>(); final IndexShard indexShard; @@ -101,7 +99,6 @@ public class TestSearchContext extends SearchContext { this.indexService = indexService; this.indexFieldDataService = indexService.fieldData(); this.fixedBitSetFilterCache = indexService.cache().bitsetFilterCache(); - this.percolatorQueryCache = indexService.cache().getPercolatorQueryCache(); this.threadPool = threadPool; this.indexShard = indexService.getShardOrNull(0); this.scriptService = scriptService; @@ -116,7 +113,6 @@ public class TestSearchContext extends SearchContext { this.indexFieldDataService = null; this.threadPool = null; this.fixedBitSetFilterCache = null; - this.percolatorQueryCache = null; this.indexShard = null; scriptService = null; this.queryShardContext = queryShardContext; @@ -323,11 +319,6 @@ public class TestSearchContext extends SearchContext { return indexFieldDataService; } - @Override - public PercolatorQueryCache percolatorQueryCache() { - return percolatorQueryCache; - } - @Override public long timeoutInMillis() { return 0; diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index c0809290177..d10c113b590 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -29,15 +29,12 @@ import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder; 
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.percolate.PercolateResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; @@ -57,7 +54,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.plugins.PluginInfo; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchModule; @@ -68,7 +64,6 @@ import org.elasticsearch.test.rest.client.http.HttpResponse; import org.hamcrest.CoreMatchers; import org.hamcrest.Matcher; import org.hamcrest.Matchers; -import org.junit.Assert; import java.io.IOException; import java.lang.reflect.Constructor; @@ -81,9 +76,6 @@ import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Set; -import java.util.function.Function; -import java.util.function.Predicate; -import java.util.stream.Collectors; import static org.apache.lucene.util.LuceneTestCase.random; import static org.elasticsearch.test.VersionUtils.randomVersion; @@ -253,13 +245,6 @@ public class ElasticsearchAssertions { assertVersionSerializable(countResponse); } - public static void assertMatchCount(PercolateResponse percolateResponse, long expectedHitCount) { - if (percolateResponse.getCount() != expectedHitCount) { - fail("Count is " + percolateResponse.getCount() + " but " + expectedHitCount + " was expected. 
" + formatShardStatus(percolateResponse)); - } - assertVersionSerializable(percolateResponse); - } - public static void assertExists(GetResponse response) { String message = String.format(Locale.ROOT, "Expected %s/%s/%s to exist, but does not", response.getIndex(), response.getType(), response.getId()); assertThat(message, response.isExists(), is(true)); @@ -334,12 +319,6 @@ public class ElasticsearchAssertions { } } - public static void assertFailures(PercolateResponse percolateResponse) { - assertThat("Expected at least one shard failure, got none", - percolateResponse.getShardFailures().length, greaterThan(0)); - assertVersionSerializable(percolateResponse); - } - public static void assertNoFailures(BroadcastResponse response) { assertThat("Unexpected ShardFailures: " + Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); assertVersionSerializable(response); diff --git a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java index 6142edb9394..2bdec79f3a0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java +++ b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java @@ -137,10 +137,10 @@ public class ReproduceInfoPrinter extends RunListener { } public ReproduceErrorMessageBuilder appendESProperties() { - appendProperties("es.logger.level"); + appendProperties("tests.es.logger.level"); if (inVerifyPhase()) { // these properties only make sense for integration tests - appendProperties("es.node.mode", "es.node.local", TESTS_CLUSTER, + appendProperties("tests.es.node.mode", "tests.es.node.local", TESTS_CLUSTER, ESIntegTestCase.TESTS_ENABLE_MOCK_MODULES); } appendProperties("tests.assertion.disabled", "tests.security.manager", "tests.nightly", "tests.jvms", diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java index 5fb6e199b17..cb35653b103 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java @@ -19,16 +19,15 @@ package org.elasticsearch.test.rest.client; import com.carrotsearch.randomizedtesting.RandomizedTest; - import org.apache.http.config.Registry; import org.apache.http.config.RegistryBuilder; import org.apache.http.conn.socket.ConnectionSocketFactory; import org.apache.http.conn.socket.PlainConnectionSocketFactory; import org.apache.http.conn.ssl.SSLConnectionSocketFactory; -import org.apache.http.conn.ssl.SSLContexts; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; +import org.apache.http.ssl.SSLContexts; import org.apache.lucene.util.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; @@ -134,7 +133,8 @@ public class RestClient implements Closeable { * @throws RestException if the obtained status code is non ok, unless the specific error code needs to be ignored * according to the ignore parameter received as input (which won't get sent to elasticsearch) */ - public RestResponse callApi(String apiName, Map params, String body, Map headers) throws IOException, RestException { + public RestResponse callApi(String apiName, Map 
params, String body, Map headers) + throws IOException, RestException { List ignores = new ArrayList<>(); Map requestParams = null; @@ -220,7 +220,8 @@ public class RestClient implements Closeable { if (restApi.getParams().contains(entry.getKey()) || ALWAYS_ACCEPTED_QUERY_STRING_PARAMS.contains(entry.getKey())) { httpRequestBuilder.addParam(entry.getKey(), entry.getValue()); } else { - throw new IllegalArgumentException("param [" + entry.getKey() + "] not supported in [" + restApi.getName() + "] api"); + throw new IllegalArgumentException("param [" + entry.getKey() + + "] not supported in [" + restApi.getName() + "] api"); } } } @@ -293,10 +294,8 @@ public class RestClient implements Closeable { try (InputStream is = Files.newInputStream(path)) { keyStore.load(is, keystorePass.toCharArray()); } - SSLContext sslcontext = SSLContexts.custom() - .loadTrustMaterial(keyStore, null) - .build(); - sslsf = new SSLConnectionSocketFactory(sslcontext, StrictHostnameVerifier.INSTANCE); + SSLContext sslcontext = SSLContexts.custom().loadTrustMaterial(keyStore, null).build(); + sslsf = new SSLConnectionSocketFactory(sslcontext); } catch (KeyStoreException|NoSuchAlgorithmException|KeyManagementException|CertificateException e) { throw new RuntimeException(e); } @@ -308,7 +307,8 @@ public class RestClient implements Closeable { .register("http", PlainConnectionSocketFactory.getSocketFactory()) .register("https", sslsf) .build(); - return HttpClients.createMinimal(new PoolingHttpClientConnectionManager(socketFactoryRegistry, null, null, null, 15, TimeUnit.SECONDS)); + return HttpClients.createMinimal( + new PoolingHttpClientConnectionManager(socketFactoryRegistry, null, null, null, 15, TimeUnit.SECONDS)); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/StrictHostnameVerifier.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/StrictHostnameVerifier.java deleted file mode 100644 index 33a92ceb417..00000000000 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/StrictHostnameVerifier.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.test.rest.client; - -import org.apache.http.conn.ssl.X509HostnameVerifier; -import org.apache.http.conn.util.InetAddressUtils; - -import javax.net.ssl.SSLException; -import javax.net.ssl.SSLSession; -import javax.net.ssl.SSLSocket; -import java.io.IOException; -import java.security.cert.X509Certificate; - -/** - * A custom {@link X509HostnameVerifier} implementation that wraps calls to the {@link org.apache.http.conn.ssl.StrictHostnameVerifier} and - * properly handles IPv6 addresses that come from a URL in the form http://[::1]:9200/ by removing the surrounding brackets. 
- * - * This is a variation of the fix for HTTPCLIENT-1698, which is not - * released yet as of Apache HttpClient 4.5.1 - */ -final class StrictHostnameVerifier implements X509HostnameVerifier { - - static final StrictHostnameVerifier INSTANCE = new StrictHostnameVerifier(); - - // We need to wrap the default verifier for HttpClient since we use an older version and the following issue is not - // fixed in a released version yet https://issues.apache.org/jira/browse/HTTPCLIENT-1698 - // TL;DR we need to strip '[' and ']' from IPv6 addresses if they come from a URL - private final X509HostnameVerifier verifier = new org.apache.http.conn.ssl.StrictHostnameVerifier(); - - private StrictHostnameVerifier() {} - - @Override - public boolean verify(String host, SSLSession sslSession) { - return verifier.verify(stripBracketsIfNecessary(host), sslSession); - } - - @Override - public void verify(String host, SSLSocket ssl) throws IOException { - verifier.verify(stripBracketsIfNecessary(host), ssl); - } - - @Override - public void verify(String host, X509Certificate cert) throws SSLException { - verifier.verify(stripBracketsIfNecessary(host), cert); - } - - @Override - public void verify(String host, String[] cns, String[] subjectAlts) throws SSLException { - verifier.verify(stripBracketsIfNecessary(host), cns, subjectAlts); - } - - private String stripBracketsIfNecessary(String host) { - if (host.startsWith("[") && host.endsWith("]")) { - String newHost = host.substring(1, host.length() - 1); - assert InetAddressUtils.isIPv6Address(newHost); - return newHost; - } - return host; - } -} diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/StrictHostnameVerifierTests.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/StrictHostnameVerifierTests.java deleted file mode 100644 index 7bbda67fbdb..00000000000 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/StrictHostnameVerifierTests.java +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.test.rest.client; - -import org.elasticsearch.test.ESTestCase; -import org.junit.Before; - -import javax.net.ssl.SSLSession; -import javax.net.ssl.SSLSocket; -import javax.security.auth.x500.X500Principal; -import java.security.cert.Certificate; -import java.security.cert.X509Certificate; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -/** - * Tests for the {@link StrictHostnameVerifier} to validate that it can verify IPv6 addresses with and without bracket notation, in - * addition to other address types. 
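Deleting the verifier and its tests is safe because the upgraded HttpClient (note the switch to org.apache.http.ssl.SSLContexts and the one-argument SSLConnectionSocketFactory constructor in RestClient above) presumably ships the HTTPCLIENT-1698 fix the wrapper existed to backport. The only behavior it added on top of the stock verifier was the bracket handling, shown here as a standalone sketch:

    // a host parsed from a URL like http://[::1]:9200/ arrives as "[::1]";
    // strip the brackets before delegating to the default hostname verifier
    static String stripBracketsIfNecessary(String host) {
        if (host.startsWith("[") && host.endsWith("]")) {
            return host.substring(1, host.length() - 1); // "[::1]" -> "::1"
        }
        return host;
    }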
- */ -public class StrictHostnameVerifierTests extends ESTestCase { - - private static final int IP_SAN_TYPE = 7; - private static final int DNS_SAN_TYPE = 2; - - private static final String[] CNS = new String[] { "my node" }; - private static final String[] IP_SANS = new String[] { "127.0.0.1", "192.168.1.1", "::1" }; - private static final String[] DNS_SANS = new String[] { "localhost", "computer", "localhost6" }; - - private SSLSocket sslSocket; - private SSLSession sslSession; - private X509Certificate certificate; - - @Before - public void setupMocks() throws Exception { - sslSocket = mock(SSLSocket.class); - sslSession = mock(SSLSession.class); - certificate = mock(X509Certificate.class); - Collection> subjectAlternativeNames = new ArrayList<>(); - for (String san : IP_SANS) { - subjectAlternativeNames.add(Arrays.asList(IP_SAN_TYPE, san)); - } - for (String san : DNS_SANS) { - subjectAlternativeNames.add(Arrays.asList(DNS_SAN_TYPE, san)); - } - - when(sslSocket.getSession()).thenReturn(sslSession); - when(sslSession.getPeerCertificates()).thenReturn(new Certificate[] { certificate }); - when(certificate.getSubjectX500Principal()).thenReturn(new X500Principal("CN=" + CNS[0])); - when(certificate.getSubjectAlternativeNames()).thenReturn(subjectAlternativeNames); - } - - public void testThatIPv6WithBracketsWorks() throws Exception { - final String ipv6Host = "[::1]"; - - // an exception will be thrown if verification fails - StrictHostnameVerifier.INSTANCE.verify(ipv6Host, CNS, IP_SANS); - StrictHostnameVerifier.INSTANCE.verify(ipv6Host, sslSocket); - StrictHostnameVerifier.INSTANCE.verify(ipv6Host, certificate); - - // this is the only one we can assert on - assertTrue(StrictHostnameVerifier.INSTANCE.verify(ipv6Host, sslSession)); - } - - public void testThatIPV6WithoutBracketWorks() throws Exception { - final String ipv6Host = "::1"; - - // an exception will be thrown if verification fails - StrictHostnameVerifier.INSTANCE.verify(ipv6Host, CNS, IP_SANS); - StrictHostnameVerifier.INSTANCE.verify(ipv6Host, sslSocket); - StrictHostnameVerifier.INSTANCE.verify(ipv6Host, certificate); - - // this is the only one we can assert on - assertTrue(StrictHostnameVerifier.INSTANCE.verify(ipv6Host, sslSession)); - } - - public void testThatIPV4Works() throws Exception { - final String ipv4Host = randomFrom("127.0.0.1", "192.168.1.1"); - - // an exception will be thrown if verification fails - StrictHostnameVerifier.INSTANCE.verify(ipv4Host, CNS, IP_SANS); - StrictHostnameVerifier.INSTANCE.verify(ipv4Host, sslSocket); - StrictHostnameVerifier.INSTANCE.verify(ipv4Host, certificate); - - // this is the only one we can assert on - assertTrue(StrictHostnameVerifier.INSTANCE.verify(ipv4Host, sslSession)); - } - - public void testThatHostnameWorks() throws Exception { - final String host = randomFrom(DNS_SANS); - - // an exception will be thrown if verification fails - StrictHostnameVerifier.INSTANCE.verify(host, CNS, DNS_SANS); - StrictHostnameVerifier.INSTANCE.verify(host, sslSocket); - StrictHostnameVerifier.INSTANCE.verify(host, certificate); - - // this is the only one we can assert on - assertTrue(StrictHostnameVerifier.INSTANCE.verify(host, sslSession)); - } -} diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java index fa9c5cf099a..d3f93939c2e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java +++ 
b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java @@ -18,6 +18,11 @@ */ package org.elasticsearch.test.rest.parser; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.yaml.YamlXContent; +import org.elasticsearch.test.rest.section.RestTestSuite; +import org.elasticsearch.test.rest.section.TestSection; + import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; @@ -25,11 +30,6 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.yaml.YamlXContent; -import org.elasticsearch.test.rest.section.RestTestSuite; -import org.elasticsearch.test.rest.section.TestSection; - /** * Parser for a complete test suite (yaml file) */ @@ -57,14 +57,11 @@ public class RestTestSuiteParser implements RestTestFragmentParser methods = new ArrayList<>(); private List paths = new ArrayList<>(); @@ -43,7 +44,8 @@ public class RestApi { NOT_SUPPORTED, OPTIONAL, REQUIRED } - RestApi(String name) { + RestApi(String location, String name) { + this.location = location; this.name = name; } @@ -51,6 +53,10 @@ public class RestApi { return name; } + public String getLocation() { + return location; + } + public List getMethods() { return methods; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestApiParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestApiParser.java index 0328e4c87d8..95fe132471a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestApiParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestApiParser.java @@ -27,113 +27,107 @@ import java.io.IOException; */ public class RestApiParser { - public RestApi parse(XContentParser parser) throws IOException { + public RestApi parse(String location, XContentParser parser) throws IOException { - try { - while ( parser.nextToken() != XContentParser.Token.FIELD_NAME ) { - //move to first field name - } - - RestApi restApi = new RestApi(parser.currentName()); - - int level = -1; - while (parser.nextToken() != XContentParser.Token.END_OBJECT || level >= 0) { - - if (parser.currentToken() == XContentParser.Token.FIELD_NAME) { - if ("methods".equals(parser.currentName())) { - parser.nextToken(); - while (parser.nextToken() == XContentParser.Token.VALUE_STRING) { - restApi.addMethod(parser.text()); - } - } - - if ("url".equals(parser.currentName())) { - String currentFieldName = "url"; - int innerLevel = -1; - while(parser.nextToken() != XContentParser.Token.END_OBJECT || innerLevel >= 0) { - if (parser.currentToken() == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } - - if (parser.currentToken() == XContentParser.Token.START_ARRAY && "paths".equals(currentFieldName)) { - while (parser.nextToken() == XContentParser.Token.VALUE_STRING) { - restApi.addPath(parser.text()); - } - } - - if (parser.currentToken() == XContentParser.Token.START_OBJECT && "parts".equals(currentFieldName)) { - while (parser.nextToken() == XContentParser.Token.FIELD_NAME) { - restApi.addPathPart(parser.currentName()); - parser.nextToken(); - if (parser.currentToken() != XContentParser.Token.START_OBJECT) { - throw new IOException("Expected parts field in rest api definition to contain an object"); - } - parser.skipChildren(); - } - } - - if (parser.currentToken() == 
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestApiParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestApiParser.java
index 0328e4c87d8..95fe132471a 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestApiParser.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestApiParser.java
@@ -27,113 +27,107 @@ import java.io.IOException;
  */
 public class RestApiParser {
 
-    public RestApi parse(XContentParser parser) throws IOException {
+    public RestApi parse(String location, XContentParser parser) throws IOException {
 
-        try {
-            while ( parser.nextToken() != XContentParser.Token.FIELD_NAME ) {
-                //move to first field name
-            }
-
-            RestApi restApi = new RestApi(parser.currentName());
-
-            int level = -1;
-            while (parser.nextToken() != XContentParser.Token.END_OBJECT || level >= 0) {
-
-                if (parser.currentToken() == XContentParser.Token.FIELD_NAME) {
-                    if ("methods".equals(parser.currentName())) {
-                        parser.nextToken();
-                        while (parser.nextToken() == XContentParser.Token.VALUE_STRING) {
-                            restApi.addMethod(parser.text());
-                        }
-                    }
-
-                    if ("url".equals(parser.currentName())) {
-                        String currentFieldName = "url";
-                        int innerLevel = -1;
-                        while(parser.nextToken() != XContentParser.Token.END_OBJECT || innerLevel >= 0) {
-                            if (parser.currentToken() == XContentParser.Token.FIELD_NAME) {
-                                currentFieldName = parser.currentName();
-                            }
-
-                            if (parser.currentToken() == XContentParser.Token.START_ARRAY && "paths".equals(currentFieldName)) {
-                                while (parser.nextToken() == XContentParser.Token.VALUE_STRING) {
-                                    restApi.addPath(parser.text());
-                                }
-                            }
-
-                            if (parser.currentToken() == XContentParser.Token.START_OBJECT && "parts".equals(currentFieldName)) {
-                                while (parser.nextToken() == XContentParser.Token.FIELD_NAME) {
-                                    restApi.addPathPart(parser.currentName());
-                                    parser.nextToken();
-                                    if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
-                                        throw new IOException("Expected parts field in rest api definition to contain an object");
-                                    }
-                                    parser.skipChildren();
-                                }
-                            }
-
-                            if (parser.currentToken() == XContentParser.Token.START_OBJECT && "params".equals(currentFieldName)) {
-                                while (parser.nextToken() == XContentParser.Token.FIELD_NAME) {
-                                    restApi.addParam(parser.currentName());
-                                    parser.nextToken();
-                                    if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
-                                        throw new IOException("Expected params field in rest api definition to contain an object");
-                                    }
-                                    parser.skipChildren();
-                                }
-                            }
-
-                            if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
-                                innerLevel++;
-                            }
-                            if (parser.currentToken() == XContentParser.Token.END_OBJECT) {
-                                innerLevel--;
-                            }
-                        }
-                    }
-
-                    if ("body".equals(parser.currentName())) {
-                        parser.nextToken();
-                        if (parser.currentToken() != XContentParser.Token.VALUE_NULL) {
-                            boolean requiredFound = false;
-                            while(parser.nextToken() != XContentParser.Token.END_OBJECT) {
-                                if (parser.currentToken() == XContentParser.Token.FIELD_NAME) {
-                                    if ("required".equals(parser.currentName())) {
-                                        requiredFound = true;
-                                        parser.nextToken();
-                                        if (parser.booleanValue()) {
-                                            restApi.setBodyRequired();
-                                        } else {
-                                            restApi.setBodyOptional();
-                                        }
-                                    }
-                                }
-                            }
-                            if (!requiredFound) {
-                                restApi.setBodyOptional();
-                            }
-                        }
-                    }
-                }
-
-                if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
-                    level++;
-                }
-                if (parser.currentToken() == XContentParser.Token.END_OBJECT) {
-                    level--;
-                }
-
-            }
-
-            parser.nextToken();
-            assert parser.currentToken() == XContentParser.Token.END_OBJECT : "Expected [END_OBJECT] but was [" + parser.currentToken() +"]";
-            parser.nextToken();
-
-            return restApi;
-
-        } finally {
-            parser.close();
+        while ( parser.nextToken() != XContentParser.Token.FIELD_NAME ) {
+            //move to first field name
         }
-    }
 
+        RestApi restApi = new RestApi(location, parser.currentName());
+
+        int level = -1;
+        while (parser.nextToken() != XContentParser.Token.END_OBJECT || level >= 0) {
+
+            if (parser.currentToken() == XContentParser.Token.FIELD_NAME) {
+                if ("methods".equals(parser.currentName())) {
+                    parser.nextToken();
+                    while (parser.nextToken() == XContentParser.Token.VALUE_STRING) {
+                        restApi.addMethod(parser.text());
+                    }
+                }
+
+                if ("url".equals(parser.currentName())) {
+                    String currentFieldName = "url";
+                    int innerLevel = -1;
+                    while(parser.nextToken() != XContentParser.Token.END_OBJECT || innerLevel >= 0) {
+                        if (parser.currentToken() == XContentParser.Token.FIELD_NAME) {
+                            currentFieldName = parser.currentName();
+                        }
+
+                        if (parser.currentToken() == XContentParser.Token.START_ARRAY && "paths".equals(currentFieldName)) {
+                            while (parser.nextToken() == XContentParser.Token.VALUE_STRING) {
+                                restApi.addPath(parser.text());
+                            }
+                        }
+
+                        if (parser.currentToken() == XContentParser.Token.START_OBJECT && "parts".equals(currentFieldName)) {
+                            while (parser.nextToken() == XContentParser.Token.FIELD_NAME) {
+                                restApi.addPathPart(parser.currentName());
+                                parser.nextToken();
+                                if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
+                                    throw new IOException("Expected parts field in rest api definition to contain an object");
+                                }
+                                parser.skipChildren();
+                            }
+                        }
+
+                        if (parser.currentToken() == XContentParser.Token.START_OBJECT && "params".equals(currentFieldName)) {
+                            while (parser.nextToken() == XContentParser.Token.FIELD_NAME) {
+                                restApi.addParam(parser.currentName());
+                                parser.nextToken();
+                                if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
+                                    throw new IOException("Expected params field in rest api definition to contain an object");
+                                }
+                                parser.skipChildren();
+                            }
+                        }
+
+                        if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
+                            innerLevel++;
+                        }
+                        if (parser.currentToken() == XContentParser.Token.END_OBJECT) {
+                            innerLevel--;
+                        }
+                    }
+                }
+
+                if ("body".equals(parser.currentName())) {
+                    parser.nextToken();
+                    if (parser.currentToken() != XContentParser.Token.VALUE_NULL) {
+                        boolean requiredFound = false;
+                        while(parser.nextToken() != XContentParser.Token.END_OBJECT) {
+                            if (parser.currentToken() == XContentParser.Token.FIELD_NAME) {
+                                if ("required".equals(parser.currentName())) {
+                                    requiredFound = true;
+                                    parser.nextToken();
+                                    if (parser.booleanValue()) {
+                                        restApi.setBodyRequired();
+                                    } else {
+                                        restApi.setBodyOptional();
+                                    }
+                                }
+                            }
+                        }
+                        if (!requiredFound) {
+                            restApi.setBodyOptional();
+                        }
+                    }
+                }
+            }
+
+            if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
+                level++;
+            }
+            if (parser.currentToken() == XContentParser.Token.END_OBJECT) {
+                level--;
+            }
+
+        }
+
+        parser.nextToken();
+        assert parser.currentToken() == XContentParser.Token.END_OBJECT : "Expected [END_OBJECT] but was [" + parser.currentToken() +"]";
+        parser.nextToken();
+
+        return restApi;
+    }
 }
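The reworked parse above no longer wraps its work in try/finally { parser.close(); }: the parser is created and closed by the caller, via the try-with-resources added to RestSpec.parseFrom below. A generic sketch of that ownership convention, using a plain BufferedReader as a stand-in for the XContentParser:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

public class CallerClosesSketch {
    // Callee: consumes from the reader but deliberately never closes it
    static String firstLine(BufferedReader reader) throws IOException {
        return reader.readLine();
    }

    public static void main(String[] args) throws IOException {
        // Caller: owns the resource, so it is closed exactly once, even on failure
        try (BufferedReader reader = new BufferedReader(new StringReader("{\"count\": {}}"))) {
            System.out.println(firstLine(reader));
        }
    }
}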
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java b/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java
index 2f154728b98..106ff5176c7 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java
@@ -41,7 +41,11 @@ public class RestSpec {
     }
 
     void addApi(RestApi restApi) {
-        restApiMap.put(restApi.getName(), restApi);
+        RestApi previous = restApiMap.putIfAbsent(restApi.getName(), restApi);
+        if (previous != null) {
+            throw new IllegalArgumentException("cannot register api [" + restApi.getName() + "] found in [" + restApi.getLocation() + "]. " +
+                    "api with same name was already found in [" + previous.getLocation() + "]");
+        }
     }
 
     public RestApi getApi(String api) {
@@ -57,12 +61,20 @@
      */
     public static RestSpec parseFrom(FileSystem fileSystem, String optionalPathPrefix, String... paths) throws IOException {
         RestSpec restSpec = new RestSpec();
+        RestApiParser restApiParser = new RestApiParser();
         for (String path : paths) {
             for (Path jsonFile : FileUtils.findJsonSpec(fileSystem, optionalPathPrefix, path)) {
                 try (InputStream stream = Files.newInputStream(jsonFile)) {
-                    XContentParser parser = JsonXContent.jsonXContent.createParser(stream);
-                    RestApi restApi = new RestApiParser().parse(parser);
-                    restSpec.addApi(restApi);
+                    try (XContentParser parser = JsonXContent.jsonXContent.createParser(stream)) {
+                        RestApi restApi = restApiParser.parse(jsonFile.toString(), parser);
+                        String filename = jsonFile.getFileName().toString();
+                        String expectedApiName = filename.substring(0, filename.lastIndexOf('.'));
+                        if (restApi.getName().equals(expectedApiName) == false) {
+                            throw new IllegalArgumentException("found api [" + restApi.getName() + "] in [" + jsonFile.toString() + "]. " +
+                                    "Each api is expected to have the same name as the file that defines it.");
+                        }
+                        restSpec.addApi(restApi);
+                    }
                 } catch (Throwable ex) {
                     throw new IOException("Can't parse rest spec file: [" + jsonFile + "]", ex);
                 }
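Two small patterns in the RestSpec change above may be worth spelling out: Map.putIfAbsent returns the previously registered value (or null), which gives register-or-detect-duplicate in one call, and the expected api name is the spec filename minus its extension. A sketch of both, with invented names and paths:

import java.util.HashMap;
import java.util.Map;

public class RestSpecSketch {
    public static void main(String[] args) {
        Map<String, String> registry = new HashMap<>();

        // putIfAbsent returns null on first registration...
        String previous = registry.putIfAbsent("index", "/specs/index.json");
        System.out.println(previous); // null

        // ...and the existing value on a duplicate, so the collision can be reported
        previous = registry.putIfAbsent("index", "/plugin/index.json");
        if (previous != null) {
            System.out.println("duplicate api [index], already defined in [" + previous + "]");
        }

        // expected api name = filename minus extension; lastIndexOf keeps dots in api names
        String filename = "indices.get_template.json";
        System.out.println(filename.substring(0, filename.lastIndexOf('.'))); // indices.get_template
    }
}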
" + + "Each api is expected to have the same name as the file that defines it."); + } + restSpec.addApi(restApi); + } } catch (Throwable ex) { throw new IOException("Can't parse rest spec file: [" + jsonFile + "]", ex); } diff --git a/test/framework/src/main/resources/log4j.properties b/test/framework/src/main/resources/log4j.properties index 22f54ef68e5..87d4560f72f 100644 --- a/test/framework/src/main/resources/log4j.properties +++ b/test/framework/src/main/resources/log4j.properties @@ -1,5 +1,5 @@ -es.logger.level=INFO -log4j.rootLogger=${es.logger.level}, out +tests.es.logger.level=INFO +log4j.rootLogger=${tests.es.logger.level}, out log4j.logger.org.apache.http=INFO, out log4j.additivity.org.apache.http=false diff --git a/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java b/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java index ee567aae779..d6cd3eea5ac 100644 --- a/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java +++ b/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java @@ -34,7 +34,7 @@ import org.elasticsearch.test.TestSearchContext; public class MockSearchServiceTests extends ESTestCase { public void testAssertNoInFlightContext() { SearchContext s = new TestSearchContext(new QueryShardContext(new IndexSettings(IndexMetaData.PROTO, Settings.EMPTY), null, null, - null, null, null, null, null, null, null, null)) { + null, null, null, null, null, null, null)) { @Override public SearchShardTarget shardTarget() { return new SearchShardTarget("node", new Index("idx", "ignored"), 0); diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserFailingTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserFailingTests.java index e2f321c81c5..0cd8ee31398 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserFailingTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserFailingTests.java @@ -28,7 +28,7 @@ import java.io.IOException; import static org.hamcrest.Matchers.containsString; /** - * + * These tests are not part of {@link RestApiParserTests} because the tested failures don't allow to consume the whole yaml stream */ public class RestApiParserFailingTests extends ESTestCase { public void testBrokenSpecShouldThrowUsefulExceptionWhenParsingFailsOnParams() throws Exception { @@ -42,7 +42,7 @@ public class RestApiParserFailingTests extends ESTestCase { private void parseAndExpectFailure(String brokenJson, String expectedErrorMessage) throws Exception { XContentParser parser = JsonXContent.jsonXContent.createParser(brokenJson); try { - new RestApiParser().parse(parser); + new RestApiParser().parse("location", parser); fail("Expected to fail parsing but did not happen"); } catch (IOException e) { assertThat(e.getMessage(), containsString(expectedErrorMessage)); diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserTests.java index 262b155c668..d884b327f71 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserTests.java @@ -29,7 +29,7 @@ import static org.hamcrest.Matchers.notNullValue; public class RestApiParserTests extends AbstractParserTestCase { public void testParseRestSpecIndexApi() throws 
diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserTests.java
index 262b155c668..d884b327f71 100644
--- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserTests.java
+++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestApiParserTests.java
@@ -29,7 +29,7 @@ import static org.hamcrest.Matchers.notNullValue;
 public class RestApiParserTests extends AbstractParserTestCase {
     public void testParseRestSpecIndexApi() throws Exception {
         parser = JsonXContent.jsonXContent.createParser(REST_SPEC_INDEX_API);
-        RestApi restApi = new RestApiParser().parse(parser);
+        RestApi restApi = new RestApiParser().parse("location", parser);
 
         assertThat(restApi, notNullValue());
         assertThat(restApi.getName(), equalTo("index"));
@@ -51,7 +51,7 @@ public class RestApiParserTests extends AbstractParserTestCase {
 
     public void testParseRestSpecGetTemplateApi() throws Exception {
         parser = JsonXContent.jsonXContent.createParser(REST_SPEC_GET_TEMPLATE_API);
-        RestApi restApi = new RestApiParser().parse(parser);
+        RestApi restApi = new RestApiParser().parse("location", parser);
         assertThat(restApi, notNullValue());
         assertThat(restApi.getName(), equalTo("indices.get_template"));
         assertThat(restApi.getMethods().size(), equalTo(1));
@@ -68,7 +68,7 @@ public class RestApiParserTests extends AbstractParserTestCase {
 
     public void testParseRestSpecCountApi() throws Exception {
         parser = JsonXContent.jsonXContent.createParser(REST_SPEC_COUNT_API);
-        RestApi restApi = new RestApiParser().parse(parser);
+        RestApi restApi = new RestApiParser().parse("location", parser);
         assertThat(restApi, notNullValue());
         assertThat(restApi.getName(), equalTo("count"));
         assertThat(restApi.getMethods().size(), equalTo(2));
diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java b/test/framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java
index b9643dda7c7..4e0623cd134 100644
--- a/test/framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java
+++ b/test/framework/src/test/java/org/elasticsearch/test/test/SuiteScopeClusterIT.java
@@ -48,7 +48,7 @@ public class SuiteScopeClusterIT extends ESIntegTestCase {
                 SEQUENCE[i] = randomLong();
             }
         } else {
-            assertEquals(CLUSTER_SEED, new Long(cluster().seed()));
+            assertEquals(CLUSTER_SEED, Long.valueOf(cluster().seed()));
             for (int i = 0; i < SEQUENCE.length; i++) {
                 assertThat(SEQUENCE[i], equalTo(randomLong()));
             }
diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/TestScopeClusterIT.java b/test/framework/src/test/java/org/elasticsearch/test/test/TestScopeClusterIT.java
index 8c3c18454cf..8ef38305018 100644
--- a/test/framework/src/test/java/org/elasticsearch/test/test/TestScopeClusterIT.java
+++ b/test/framework/src/test/java/org/elasticsearch/test/test/TestScopeClusterIT.java
@@ -42,7 +42,7 @@ public class TestScopeClusterIT extends ESIntegTestCase {
                 SEQUENCE[i] = randomLong();
            }
         } else {
-            assertEquals(CLUSTER_SEED, new Long(cluster().seed()));
+            assertEquals(CLUSTER_SEED, Long.valueOf(cluster().seed()));
             for (int i = 0; i < SEQUENCE.length; i++) {
                 assertThat(SEQUENCE[i], equalTo(randomLong()));
             }
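On the new Long(...) to Long.valueOf(...) change in the two tests above: valueOf may return a cached instance (its javadoc guarantees caching for values in -128..127) instead of always allocating, and the boxing constructor was later deprecated in Java 9. assertEquals only relies on equals, so behavior is unchanged. A quick illustration:

public class BoxingSketch {
    public static void main(String[] args) {
        Long a = Long.valueOf(7L);
        Long b = Long.valueOf(7L);
        System.out.println(a == b);       // true: small values come from the cache
        System.out.println(a.equals(b));  // true: what assertEquals actually checks

        @SuppressWarnings("deprecation")
        Long c = new Long(7L);            // always a fresh object
        System.out.println(a == c);       // false
        System.out.println(a.equals(c));  // true
    }
}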