diff --git a/Vagrantfile b/Vagrantfile
index 4f8ee7164f6..5b51f612eb3 100644
--- a/Vagrantfile
+++ b/Vagrantfile
@@ -42,7 +42,7 @@ Vagrant.configure(2) do |config|
   # debian and it works fine.
   config.vm.define "debian-8" do |config|
     config.vm.box = "elastic/debian-8-x86_64"
-    deb_common config, 'echo deb http://http.debian.net/debian jessie-backports main > /etc/apt/sources.list.d/backports.list', 'backports'
+    deb_common config, 'echo deb http://cloudfront.debian.net/debian jessie-backports main > /etc/apt/sources.list.d/backports.list', 'backports'
   end
   config.vm.define "centos-6" do |config|
     config.vm.box = "elastic/centos-6-x86_64"
diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/Allocators.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/Allocators.java
index aba7fda1021..97fbda80dc6 100644
--- a/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/Allocators.java
+++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/Allocators.java
@@ -31,7 +31,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider;
 import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.gateway.GatewayAllocator;
@@ -102,7 +102,7 @@ public final class Allocators {
     }

     public static DiscoveryNode newNode(String nodeId, Map<String, String> attributes) {
-        return new DiscoveryNode("", nodeId, DummyTransportAddress.INSTANCE, attributes, Sets.newHashSet(DiscoveryNode.Role.MASTER,
+        return new DiscoveryNode("", nodeId, LocalTransportAddress.buildUnique(), attributes, Sets.newHashSet(DiscoveryNode.Role.MASTER,
             DiscoveryNode.Role.DATA), Version.CURRENT);
     }
 }
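The hunk above is the pattern repeated across this whole PR: the shared `DummyTransportAddress.INSTANCE` singleton is gone, and tests mint a fresh local address per node. A minimal sketch of that idiom; `TestNodes` is an illustrative name, the `DiscoveryNode` constructor matches the hunks in this diff:

```java
import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.transport.LocalTransportAddress;

import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;

final class TestNodes {
    // buildUnique() returns a distinct local address on every call, so two test
    // nodes no longer compare equal by address the way they could when all of
    // them shared DummyTransportAddress.INSTANCE.
    static DiscoveryNode newNode(String nodeId) {
        return new DiscoveryNode(nodeId, LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT);
    }
}
```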
"" : formatDescription(e.getDescription()))) - try { - playBeat(); - } catch (Exception nosound) { /* handling exceptions with style */ } slowTestsFound = true } - void playBeat() throws Exception { - Clip clip = (Clip)AudioSystem.getLine(new Line.Info(Clip.class)); - final AtomicBoolean stop = new AtomicBoolean(); - clip.addLineListener(new LineListener() { - @Override - public void update(LineEvent event) { - if (event.getType() == LineEvent.Type.STOP) { - stop.set(true); - } - } - }); - InputStream stream = getClass().getResourceAsStream("/beat.wav"); - try { - clip.open(AudioSystem.getAudioInputStream(stream)); - clip.start(); - while (!stop.get()) { - Thread.sleep(20); - } - clip.close(); - } finally { - stream.close(); - } - } - @Subscribe void onQuit(AggregatedQuitEvent e) throws IOException { if (config.showNumFailuresAtEnd > 0 && !failedTests.isEmpty()) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy index ba7311fee6f..c7f4316ee04 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy @@ -131,8 +131,9 @@ public class RestTestsFromSnippetsTask extends SnippetsTask { } private void response(Snippet response) { - current.println(" - response_body: |") - response.contents.eachLine { current.println(" $it") } + current.println(" - match: ") + current.println(" \$body: ") + response.contents.eachLine { current.println(" $it") } } void emitDo(String method, String pathAndQuery, diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy index 3bfe9d61018..fedcf6e87d3 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy @@ -62,6 +62,7 @@ public class RestIntegTestTask extends RandomizedTestingTask { project.gradle.projectsEvaluated { NodeInfo node = ClusterFormationTasks.setup(project, this, clusterConfig) systemProperty('tests.rest.cluster', "${-> node.httpUri()}") + systemProperty('tests.config.dir', "${-> node.confDir}") // TODO: our "client" qa tests currently use the rest-test plugin. instead they should have their own plugin // that sets up the test cluster and passes this transport uri instead of http uri. 
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy
index 3bfe9d61018..fedcf6e87d3 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy
@@ -62,6 +62,7 @@ public class RestIntegTestTask extends RandomizedTestingTask {
         project.gradle.projectsEvaluated {
             NodeInfo node = ClusterFormationTasks.setup(project, this, clusterConfig)
             systemProperty('tests.rest.cluster', "${-> node.httpUri()}")
+            systemProperty('tests.config.dir', "${-> node.confDir}")
             // TODO: our "client" qa tests currently use the rest-test plugin. instead they should have their own plugin
             // that sets up the test cluster and passes this transport uri instead of http uri. Until then, we pass
             // both as separate sysprops
diff --git a/buildSrc/src/main/resources/beat.wav b/buildSrc/src/main/resources/beat.wav
deleted file mode 100644
index 4083a4ce618..00000000000
Binary files a/buildSrc/src/main/resources/beat.wav and /dev/null differ
diff --git a/buildSrc/src/main/resources/checkstyle.xml b/buildSrc/src/main/resources/checkstyle.xml
index de47736913f..706ef46ffa1 100644
--- a/buildSrc/src/main/resources/checkstyle.xml
+++ b/buildSrc/src/main/resources/checkstyle.xml
@@ -39,6 +39,27 @@
+
+
+
+
+
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java
         logger.info("--> waiting for 3 nodes to be up");
-        assertBusy(new Runnable() {
-            @Override
-            public void run() {
-                NodesStatsResponse resp = client().admin().cluster().prepareNodesStats().get();
-                assertThat(resp.getNodes().size(), equalTo(3));
-            }
+        assertBusy(() -> {
+            NodesStatsResponse resp = client().admin().cluster().prepareNodesStats().get();
+            assertThat(resp.getNodes().size(), equalTo(3));
         });

         logger.info("--> creating 'test' index");
@@ -126,7 +123,6 @@ public final class ClusterAllocationExplainIT extends ESIntegTestCase {
         Map<DiscoveryNode, NodeExplanation> explanations = cae.getNodeExplanations();

-        Float noAttrWeight = -1f;
         Float barAttrWeight = -1f;
         Float fooBarAttrWeight = -1f;
         for (Map.Entry<DiscoveryNode, NodeExplanation> entry : explanations.entrySet()) {
@@ -134,7 +130,6 @@ public final class ClusterAllocationExplainIT extends ESIntegTestCase {
             String nodeName = node.getName();
             NodeExplanation explanation = entry.getValue();
             ClusterAllocationExplanation.FinalDecision finalDecision = explanation.getFinalDecision();
-            String finalExplanation = explanation.getFinalExplanation();
             ClusterAllocationExplanation.StoreCopy storeCopy = explanation.getStoreCopy();
             Decision d = explanation.getDecision();
             float weight = explanation.getWeight();
@@ -143,7 +138,6 @@ public final class ClusterAllocationExplainIT extends ESIntegTestCase {
             assertEquals(d.type(), Decision.Type.NO);
             if (noAttrNode.equals(nodeName)) {
                 assertThat(d.toString(), containsString("node does not match index include filters [foo:\"bar\"]"));
-                noAttrWeight = weight;
                 assertNull(storeStatus);
                 assertEquals("the shard cannot be assigned because one or more allocation decider returns a 'NO' decision",
                     explanation.getFinalExplanation());
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainTests.java
index d5cefc6d1f3..6c23d1604b8 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainTests.java
@@ -47,7 +47,6 @@ public final class ClusterAllocationExplainTests extends ESSingleNodeTestCase {
         NodeExplanation explanation = cae.getNodeExplanations().values().iterator().next();
         ClusterAllocationExplanation.FinalDecision fd = explanation.getFinalDecision();
         ClusterAllocationExplanation.StoreCopy storeCopy = explanation.getStoreCopy();
-        String finalExplanation = explanation.getFinalExplanation();
         Decision d = explanation.getDecision();
         assertNotNull("should have a decision", d);
         assertEquals(Decision.Type.NO, d.type());
@@ -76,7 +75,6 @@ public final class ClusterAllocationExplainTests extends ESSingleNodeTestCase {
         d = explanation.getDecision();
         fd = explanation.getFinalDecision();
         storeCopy = explanation.getStoreCopy();
-        finalExplanation = explanation.getFinalExplanation();
         assertNotNull("should have a decision", d);
         assertEquals(Decision.Type.NO, d.type());
         assertEquals(ClusterAllocationExplanation.FinalDecision.ALREADY_ASSIGNED, fd);
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanationTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanationTests.java
index d0e8ef14d01..5b39c5d34dc 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanationTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanationTests.java
@@ -31,7 +31,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.Decision;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -41,7 +41,6 @@ import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.test.ESTestCase;

 import java.io.IOException;
-import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
@@ -66,7 +65,7 @@ public final class ClusterAllocationExplanationTests extends ESTestCase {
         .numberOfShards(1)
         .numberOfReplicas(1)
         .build();
-    private DiscoveryNode node = new DiscoveryNode("node-0", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT);
+    private DiscoveryNode node = new DiscoveryNode("node-0", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT);
     private static Decision.Multi yesDecision = new Decision.Multi();
     private static Decision.Multi noDecision = new Decision.Multi();
@@ -205,7 +204,7 @@ public final class ClusterAllocationExplanationTests extends ESTestCase {
             "assignedNode", allocationDelay, remainingDelay, null, false, nodeExplanations);
         BytesStreamOutput out = new BytesStreamOutput();
         cae.writeTo(out);
-        StreamInput in = StreamInput.wrap(out.bytes());
+        StreamInput in = out.bytes().streamInput();
         ClusterAllocationExplanation cae2 = new ClusterAllocationExplanation(in);
         assertEquals(shard, cae2.getShard());
         assertTrue(cae2.isPrimary());
@@ -215,9 +214,7 @@ public final class ClusterAllocationExplanationTests extends ESTestCase {
         assertEquals(allocationDelay, cae2.getAllocationDelayMillis());
         assertEquals(remainingDelay, cae2.getRemainingDelayMillis());
         for (Map.Entry<DiscoveryNode, NodeExplanation> entry : cae2.getNodeExplanations().entrySet()) {
-            DiscoveryNode node = entry.getKey();
             NodeExplanation explanation = entry.getValue();
-            IndicesShardStoresResponse.StoreStatus status = explanation.getStoreStatus();
             assertNotNull(explanation.getStoreStatus());
             assertNotNull(explanation.getDecision());
             assertEquals(nodeWeight, explanation.getWeight());
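Every `StreamInput.wrap(out.bytes())` in this PR becomes `out.bytes().streamInput()`, which reads the `BytesReference` in place rather than first copying it to a `byte[]`. A minimal sketch of the round-trip idiom the tests share, assuming only the `Writeable` contract; the helper name is illustrative:

```java
import java.io.IOException;

import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.Writeable;

final class RoundTrips {
    // Serialize `original` and hand back a StreamInput positioned at the first
    // byte, ready for the matching readFrom(...) or stream constructor.
    static StreamInput serialize(Writeable original) throws IOException {
        BytesStreamOutput out = new BytesStreamOutput();
        original.writeTo(out);
        return out.bytes().streamInput();
    }
}
```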
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java
index 704c1348b7e..d0d452df478 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java
@@ -84,7 +84,7 @@ public class ClusterHealthResponsesTests extends ESTestCase {
         if (randomBoolean()) {
             BytesStreamOutput out = new BytesStreamOutput();
             clusterHealth.writeTo(out);
-            StreamInput in = StreamInput.wrap(out.bytes());
+            StreamInput in = out.bytes().streamInput();
             clusterHealth = ClusterHealthResponse.readResponseFrom(in);
         }
         return clusterHealth;
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java
index 594028f4e6f..9027b3d372e 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java
@@ -230,7 +230,7 @@ public class CancellableTasksTests extends TaskManagerTestCase {
         }

         @Override
-        public void onFailure(Throwable e) {
+        public void onFailure(Exception e) {
             throwableReference.set(e);
             responseLatch.countDown();
         }
@@ -308,7 +308,7 @@ public class CancellableTasksTests extends TaskManagerTestCase {
         }

         @Override
-        public void onFailure(Throwable e) {
+        public void onFailure(Exception e) {
             throwableReference.set(e);
             responseLatch.countDown();
         }
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java
index 6f8e3fda156..f9045b58413 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java
@@ -738,12 +738,12 @@ public class TasksIT extends ESIntegTestCase {
             }

             @Override
-            public void onFailure(Throwable e) {
+            public void onFailure(Exception e) {
                 throw new RuntimeException(e);
             }
         });
         b.await();
-
+
         // Now we can find it!
         GetTaskResponse response = expectFinishedTask(new TaskId("fake:1"));
         assertEquals("test", response.getTask().getTask().getAction());
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
index 8f8c10c8dda..122ae910e7f 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
@@ -20,8 +20,9 @@ package org.elasticsearch.action.admin.cluster.node.tasks;

 import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.ActionModule;
+import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestBuilder;
+import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.FailedNodeException;
 import org.elasticsearch.action.TaskOperationFailure;
 import org.elasticsearch.action.support.ActionFilters;
@@ -38,7 +39,6 @@ import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -47,6 +47,7 @@ import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.plugins.ActionPlugin;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.tasks.CancellableTask;
 import org.elasticsearch.tasks.Task;
@@ -56,6 +57,7 @@ import org.elasticsearch.transport.TransportService;

 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
@@ -65,11 +67,12 @@ import static org.elasticsearch.test.ESTestCase.awaitBusy;

 /**
  * A plugin that adds a cancellable blocking test task of integration testing of the task manager.
  */
-public class TestTaskPlugin extends Plugin {
+public class TestTaskPlugin extends Plugin implements ActionPlugin {

-    public void onModule(ActionModule module) {
-        module.registerAction(TestTaskAction.INSTANCE, TransportTestTaskAction.class);
-        module.registerAction(UnblockTestTasksAction.INSTANCE, TransportUnblockTestTasksAction.class);
+    @Override
+    public List<ActionHandler<? extends ActionRequest<?>, ? extends ActionResponse>> getActions() {
+        return Arrays.asList(new ActionHandler<>(TestTaskAction.INSTANCE, TransportTestTaskAction.class),
+            new ActionHandler<>(UnblockTestTasksAction.INSTANCE, TransportUnblockTestTasksAction.class));
     }

     static class TestTask extends CancellableTask {
@@ -261,17 +264,6 @@ public class TestTaskPlugin extends Plugin {
             return new NodesResponse(clusterService.getClusterName(), responses, failures);
         }

-        @Override
-        protected String[] filterNodeIds(DiscoveryNodes nodes, String[] nodesIds) {
-            List<String> list = new ArrayList<>();
-            for (String node : nodesIds) {
-                if (nodes.nodeExists(node)) {
-                    list.add(node);
-                }
-            }
-            return list.toArray(new String[list.size()]);
-        }
-
         @Override
         protected NodeRequest newNodeRequest(String nodeId, NodesRequest request) {
             return new NodeRequest(request, nodeId, request.getShouldBlock());
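TestTaskPlugin above shows the new plugin API in miniature: instead of mutating `ActionModule` from an `onModule` hook, a plugin implements `ActionPlugin` and returns its handlers. A condensed sketch of that shape, using the action classes registered in the diff; the generic bounds are assumed from this era of the codebase:

```java
import java.util.Arrays;
import java.util.List;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.Plugin;

public class SketchPlugin extends Plugin implements ActionPlugin {
    @Override
    public List<ActionHandler<? extends ActionRequest<?>, ? extends ActionResponse>> getActions() {
        // Each ActionHandler pairs an Action instance with the TransportAction
        // class that implements it; the node wires both into ActionModule.
        return Arrays.asList(
            new ActionHandler<>(TestTaskPlugin.TestTaskAction.INSTANCE,
                TestTaskPlugin.TransportTestTaskAction.class));
    }
}
```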
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
index c4d49d899b9..2c78786ab04 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
@@ -249,7 +249,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase {
     /**
      * Test class for testing task operations
      */
-    static abstract class TestTasksAction extends TransportTasksAction {
+    abstract static class TestTasksAction extends TransportTasksAction {

         protected TestTasksAction(Settings settings, String actionName, ThreadPool threadPool,
             ClusterService clusterService, TransportService transportService) {
@@ -338,7 +338,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase {
         }

         @Override
-        public void onFailure(Throwable e) {
+        public void onFailure(Exception e) {
             logger.warn("Couldn't get list of tasks", e);
             responseLatch.countDown();
         }
@@ -526,7 +526,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase {
         }

         @Override
-        public void onFailure(Throwable e) {
+        public void onFailure(Exception e) {
             responseLatch.countDown();
         }
     });
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java
index b736751b781..4f553dfb88a 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java
@@ -166,7 +166,7 @@ public class ClusterRerouteRequestTests extends ESTestCase {
     private ClusterRerouteRequest roundTripThroughBytes(ClusterRerouteRequest original) throws IOException {
         try (BytesStreamOutput output = new BytesStreamOutput()) {
             original.writeTo(output);
-            try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
+            try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) {
                 ClusterRerouteRequest copy = new ClusterRerouteRequest();
                 copy.readFrom(in);
                 return copy;
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java
index 00fcbf60a5a..657fec558b8 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java
@@ -66,7 +66,7 @@ public class ClusterRerouteTests extends ESAllocationTestCase {
         BytesReference bytes = out.bytes();
         NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry();
         new NetworkModule(null, Settings.EMPTY, true, namedWriteableRegistry);
-        StreamInput wrap = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes.toBytes()),
+        StreamInput wrap = new NamedWriteableAwareStreamInput(bytes.streamInput(),
             namedWriteableRegistry);
         ClusterRerouteRequest deserializedReq = new ClusterRerouteRequest();
         deserializedReq.readFrom(wrap);
@@ -94,7 +94,7 @@ public class ClusterRerouteTests extends ESAllocationTestCase {
             }

             @Override
-            public void onFailure(Throwable e) {
+            public void onFailure(Exception e) {
             }
         };
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java
index fc04de81254..b515829b72a 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java
@@ -45,7 +45,7 @@ public class ClusterStateRequestTests extends ESTestCase {
             output.setVersion(testVersion);
             clusterStateRequest.writeTo(output);

-            StreamInput streamInput = StreamInput.wrap(output.bytes());
+            StreamInput streamInput = output.bytes().streamInput();
             streamInput.setVersion(testVersion);
             ClusterStateRequest deserializedCSRequest = new ClusterStateRequest();
             deserializedCSRequest.readFrom(streamInput);
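ClusterStateRequestTests above and the GetStoredScriptRequestTests hunk below add a wrinkle to the round-trip: a random wire version is pinned on both ends of the stream, since version-gated fields are mis-read if the reader's version does not match the writer's. The shape of that check, with illustrative naming:

```java
import java.io.IOException;

import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.Writeable;

final class VersionedRoundTrips {
    // Write with an explicit wire version, then mirror it on the reader side
    // before calling readFrom(...) on the copy.
    static StreamInput serialize(Writeable original, Version version) throws IOException {
        BytesStreamOutput out = new BytesStreamOutput();
        out.setVersion(version);
        original.writeTo(out);
        StreamInput in = out.bytes().streamInput();
        in.setVersion(out.getVersion()); // must match the writer
        return in;
    }
}
```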
diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptRequestTests.java
index e4c2849b907..2e9239a2c3b 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptRequestTests.java
@@ -37,7 +37,7 @@ public class GetStoredScriptRequestTests extends ESTestCase {
         out.setVersion(randomVersion(random()));
         request.writeTo(out);

-        StreamInput in = StreamInput.wrap(out.bytes());
+        StreamInput in = out.bytes().streamInput();
         in.setVersion(out.getVersion());
         GetStoredScriptRequest request2 = new GetStoredScriptRequest();
         request2.readFrom(in);
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java
index ff7faab1f6a..5e2c503eba1 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java
@@ -29,12 +29,15 @@ import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.analysis.AnalysisRegistry;
 import org.elasticsearch.index.analysis.AnalysisService;
 import org.elasticsearch.index.mapper.internal.AllFieldMapper;
+import org.elasticsearch.indices.analysis.AnalysisModule;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.IndexSettingsModule;

 import java.io.IOException;
 import java.util.List;

+import static java.util.Collections.emptyList;
+
 public class TransportAnalyzeActionTests extends ESTestCase {

     private AnalysisService analysisService;
@@ -56,7 +59,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
             .putArray("index.analysis.analyzer.custom_analyzer.filter", "lowercase", "wordDelimiter").build();
         IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
         environment = new Environment(settings);
-        registry = new AnalysisRegistry(null, environment);
+        registry = new AnalysisModule(environment, emptyList()).getAnalysisRegistry();
         analysisService = registry.build(idxSettings);
     }
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java
index 3e7323dceeb..428e859e342 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java
@@ -41,14 +41,11 @@ import org.elasticsearch.index.IndexNotFoundException;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.query.RangeQueryBuilder;
 import org.elasticsearch.index.query.TermsQueryBuilder;
-import org.elasticsearch.node.service.NodeService;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import org.elasticsearch.test.ESIntegTestCase.Scope;
-import org.junit.Ignore;

 import java.util.HashMap;
-import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicInteger;

@@ -232,7 +229,7 @@ public class CreateIndexIT extends ESIntegTestCase {
         }

         @Override
-        public void onFailure(Throwable e) {
+        public void onFailure(Exception e) {
             throw new RuntimeException(e);
         }
     }
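The recurring `onFailure(Throwable)` to `onFailure(Exception)` change is the PR-wide narrowing of the listener failure contract: callbacks now receive an `Exception`, and fatal `Error`s propagate instead of being swallowed. A minimal listener in the new shape; the response type is illustrative:

```java
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.bulk.BulkResponse;

final class Listeners {
    static ActionListener<BulkResponse> noop() {
        return new ActionListener<BulkResponse>() {
            @Override
            public void onResponse(BulkResponse response) {
                // success path
            }

            @Override
            public void onFailure(Exception e) { // was: onFailure(Throwable t)
                // only Exceptions arrive here; Errors now reach the caller
            }
        };
    }
}
```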
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java
index 04f6037f64b..7040c92ec1d 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java
@@ -84,7 +84,7 @@ public class SyncedFlushUnitTests extends ESTestCase {
         assertThat(testPlan.result.restStatus(), equalTo(testPlan.totalCounts.failed > 0 ? RestStatus.CONFLICT : RestStatus.OK));
         BytesStreamOutput out = new BytesStreamOutput();
         testPlan.result.writeTo(out);
-        StreamInput in = StreamInput.wrap(out.bytes());
+        StreamInput in = out.bytes().streamInput();
         SyncedFlushResponse readResponse = new SyncedFlushResponse();
         readResponse.readFrom(in);
         assertThat(readResponse.totalShards(), equalTo(testPlan.totalCounts.total));
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java
index 6e3e5d76224..1cd1704e164 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java
@@ -215,7 +215,7 @@ public class IndicesShardStoreRequestIT extends ESIntegTestCase {
         client().admin().indices().prepareFlush().setForce(true).setWaitIfOngoing(true).execute().actionGet();
     }

-    private final static class IndexNodePredicate implements Predicate {
+    private static final class IndexNodePredicate implements Predicate {
         private final Set<String> nodesWithShard;

         public IndexNodePredicate(String index) {
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java
index 3c12d7d9b10..9705009a044 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java
@@ -26,7 +26,7 @@ import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.ImmutableOpenIntMap;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
@@ -53,8 +53,8 @@ public class IndicesShardStoreResponseTests extends ESTestCase {
         List<IndicesShardStoresResponse.Failure> failures = new ArrayList<>();
         ImmutableOpenIntMap.Builder<List<IndicesShardStoresResponse.StoreStatus>> storeStatuses = ImmutableOpenIntMap.builder();
-        DiscoveryNode node1 = new DiscoveryNode("node1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT);
-        DiscoveryNode node2 = new DiscoveryNode("node2", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT);
+        DiscoveryNode node1 = new DiscoveryNode("node1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT);
+        DiscoveryNode node2 = new DiscoveryNode("node2", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT);
         List<IndicesShardStoresResponse.StoreStatus> storeStatusList = new ArrayList<>();
         storeStatusList.add(new IndicesShardStoresResponse.StoreStatus(node1, 3, null, IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null));
         storeStatusList.add(new IndicesShardStoresResponse.StoreStatus(node2, ShardStateMetaData.NO_VERSION, UUIDs.randomBase64UUID(), IndicesShardStoresResponse.StoreStatus.AllocationStatus.REPLICA, null));
@@ -122,7 +122,7 @@ public class IndicesShardStoreResponseTests extends ESTestCase {
     }

     public void testStoreStatusOrdering() throws Exception {
-        DiscoveryNode node1 = new DiscoveryNode("node1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT);
+        DiscoveryNode node1 = new DiscoveryNode("node1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT);
         List<IndicesShardStoresResponse.StoreStatus> orderedStoreStatuses = new ArrayList<>();
         orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, ShardStateMetaData.NO_VERSION, UUIDs.randomBase64UUID(), IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null));
         orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, ShardStateMetaData.NO_VERSION, UUIDs.randomBase64UUID(), IndicesShardStoresResponse.StoreStatus.AllocationStatus.REPLICA, null));
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportShrinkActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportShrinkActionTests.java
index d78374d446f..3236de4aaeb 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportShrinkActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportShrinkActionTests.java
@@ -38,7 +38,7 @@ import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator;
 import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
 import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.index.shard.DocsStats;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.gateway.NoopGatewayAllocator;
@@ -140,7 +140,7 @@ public class TransportShrinkActionTests extends ESTestCase {
     }

     private DiscoveryNode newNode(String nodeId) {
-        return new DiscoveryNode(nodeId, DummyTransportAddress.INSTANCE, emptyMap(),
+        return new DiscoveryNode(nodeId, LocalTransportAddress.buildUnique(), emptyMap(),
             Collections.unmodifiableSet(new HashSet<>(Arrays.asList(DiscoveryNode.Role.MASTER, DiscoveryNode.Role.DATA))), Version.CURRENT);
     }
 }
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java
index 726dccee597..dfc10169e70 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java
@@ -19,6 +19,7 @@

 package org.elasticsearch.action.admin.indices.stats;

+import org.elasticsearch.action.ShardOperationFailedException;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.engine.CommitStats;
@@ -26,6 +27,8 @@ import org.elasticsearch.index.engine.SegmentsStats;
 import org.elasticsearch.index.translog.Translog;
 import org.elasticsearch.test.ESSingleNodeTestCase;

+import java.util.List;
+
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.hasKey;
@@ -108,4 +111,12 @@ public class IndicesStatsTests extends ESSingleNodeTestCase {
         }
     }

+    /**
+     * Gives access to package private IndicesStatsResponse constructor for test purpose.
+     **/
+    public static IndicesStatsResponse newIndicesStatsResponse(ShardStats[] shards, int totalShards, int successfulShards,
+                                                               int failedShards, List<ShardOperationFailedException> shardFailures) {
+        return new IndicesStatsResponse(shards, totalShards, successfulShards, failedShards, shardFailures);
+    }
+
 }
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java
index d62fe30f6fa..58784fdb7df 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java
@@ -19,7 +19,6 @@

 package org.elasticsearch.action.admin.indices.template.put;

-import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.alias.Alias;
 import org.elasticsearch.cluster.metadata.AliasValidator;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -172,8 +171,8 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase {
         }

         @Override
-        public void onFailure(Throwable t) {
-            throwables.add(t);
+        public void onFailure(Exception e) {
+            throwables.add(e);
         }
     });
     return throwables;
@@ -205,8 +204,8 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase {
         }

         @Override
-        public void onFailure(Throwable t) {
-            throwables.add(t);
+        public void onFailure(Exception e) {
+            throwables.add(e);
             latch.countDown();
         }
     });
diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BackoffPolicyTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BackoffPolicyTests.java
new file mode 100644
index 00000000000..e3cfeb2a4ac
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/action/bulk/BackoffPolicyTests.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.bulk;
+
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
+
+public class BackoffPolicyTests extends ESTestCase {
+    public void testWrapBackoffPolicy() {
+        TimeValue timeValue = timeValueMillis(between(0, Integer.MAX_VALUE));
+        int maxNumberOfRetries = between(1, 1000);
+        BackoffPolicy policy = BackoffPolicy.constantBackoff(timeValue, maxNumberOfRetries);
+        AtomicInteger retries = new AtomicInteger();
+        policy = BackoffPolicy.wrap(policy, retries::getAndIncrement);
+
+        int expectedRetries = 0;
+        {
+            // Fetching the iterator doesn't call the callback
+            Iterator<TimeValue> itr = policy.iterator();
+            assertEquals(expectedRetries, retries.get());
+
+            while (itr.hasNext()) {
+                // hasNext doesn't trigger the callback
+                assertEquals(expectedRetries, retries.get());
+                // next does
+                itr.next();
+                expectedRetries += 1;
+                assertEquals(expectedRetries, retries.get());
+            }
+            // next doesn't call the callback when there isn't a backoff available
+            expectThrows(NoSuchElementException.class, () -> itr.next());
+            assertEquals(expectedRetries, retries.get());
+        }
+        {
+            // The second iterator also calls the callback
+            Iterator<TimeValue> itr = policy.iterator();
+            itr.next();
+            expectedRetries += 1;
+            assertEquals(expectedRetries, retries.get());
+        }
+    }
+}
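The new test pins down the contract of `BackoffPolicy.wrap(policy, callback)`: `iterator()` and `hasNext()` are silent, every successful `next()` fires the callback exactly once, and an exhausted iterator throws without firing it. A plausible implementation consistent with those observations, assuming it lives alongside `BackoffPolicy` in `org.elasticsearch.action.bulk`; this is a sketch, not the actual production class:

```java
import java.util.Iterator;

import org.elasticsearch.action.bulk.BackoffPolicy;
import org.elasticsearch.common.unit.TimeValue;

// Hypothetical delegating policy that mirrors what BackoffPolicyTests observes.
final class CountingBackoffPolicy extends BackoffPolicy {
    private final BackoffPolicy delegate;
    private final Runnable onRetry;

    CountingBackoffPolicy(BackoffPolicy delegate, Runnable onRetry) {
        this.delegate = delegate;
        this.onRetry = onRetry;
    }

    @Override
    public Iterator<TimeValue> iterator() {
        Iterator<TimeValue> inner = delegate.iterator();
        return new Iterator<TimeValue>() {
            @Override
            public boolean hasNext() {
                return inner.hasNext(); // no callback here
            }

            @Override
            public TimeValue next() {
                // NoSuchElementException escapes before the callback can fire,
                // matching the exhausted-iterator assertion in the test.
                TimeValue backoff = inner.next();
                onRetry.run();
                return backoff;
            }
        };
    }
}
```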
diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java
index 337f881d41b..142fb282c20 100644
--- a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java
@@ -55,9 +55,9 @@ public class BulkRequestTests extends ESTestCase {
         BulkRequest bulkRequest = new BulkRequest();
         bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
         assertThat(bulkRequest.numberOfActions(), equalTo(3));
-        assertThat(((IndexRequest) bulkRequest.requests().get(0)).source().toBytes(), equalTo(new BytesArray("{ \"field1\" : \"value1\" }").toBytes()));
+        assertThat(((IndexRequest) bulkRequest.requests().get(0)).source(), equalTo(new BytesArray("{ \"field1\" : \"value1\" }")));
         assertThat(bulkRequest.requests().get(1), instanceOf(DeleteRequest.class));
-        assertThat(((IndexRequest) bulkRequest.requests().get(2)).source().toBytes(), equalTo(new BytesArray("{ \"field1\" : \"value3\" }").toBytes()));
+        assertThat(((IndexRequest) bulkRequest.requests().get(2)).source(), equalTo(new BytesArray("{ \"field1\" : \"value3\" }")));
     }

     public void testSimpleBulk2() throws Exception {
@@ -81,7 +81,7 @@ public class BulkRequestTests extends ESTestCase {
         assertThat(bulkRequest.numberOfActions(), equalTo(4));
         assertThat(((UpdateRequest) bulkRequest.requests().get(0)).id(), equalTo("1"));
         assertThat(((UpdateRequest) bulkRequest.requests().get(0)).retryOnConflict(), equalTo(2));
-        assertThat(((UpdateRequest) bulkRequest.requests().get(0)).doc().source().toUtf8(), equalTo("{\"field\":\"value\"}"));
+        assertThat(((UpdateRequest) bulkRequest.requests().get(0)).doc().source().utf8ToString(), equalTo("{\"field\":\"value\"}"));
         assertThat(((UpdateRequest) bulkRequest.requests().get(1)).id(), equalTo("0"));
         assertThat(((UpdateRequest) bulkRequest.requests().get(1)).type(), equalTo("type1"));
         assertThat(((UpdateRequest) bulkRequest.requests().get(1)).index(), equalTo("index1"));
@@ -93,7 +93,7 @@ public class BulkRequestTests extends ESTestCase {
         assertThat(scriptParams, notNullValue());
         assertThat(scriptParams.size(), equalTo(1));
         assertThat(((Integer) scriptParams.get("param1")), equalTo(1));
-        assertThat(((UpdateRequest) bulkRequest.requests().get(1)).upsertRequest().source().toUtf8(), equalTo("{\"counter\":1}"));
+        assertThat(((UpdateRequest) bulkRequest.requests().get(1)).upsertRequest().source().utf8ToString(), equalTo("{\"counter\":1}"));
     }

     public void testBulkAllowExplicitIndex() throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkShardRequestTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkShardRequestTests.java
index b26d2531ff0..bb406366d25 100644
--- a/core/src/test/java/org/elasticsearch/action/bulk/BulkShardRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkShardRequestTests.java
@@ -29,11 +29,11 @@ public class BulkShardRequestTests extends ESTestCase {
     public void testToString() {
         String index = randomSimpleString(random(), 10);
         int count = between(1, 100);
-        BulkShardRequest r = new BulkShardRequest(null, new ShardId(index, "ignored", 0), RefreshPolicy.NONE, new BulkItemRequest[count]);
+        BulkShardRequest r = new BulkShardRequest(new ShardId(index, "ignored", 0), RefreshPolicy.NONE, new BulkItemRequest[count]);
         assertEquals("BulkShardRequest to [" + index + "] containing [" + count + "] requests", r.toString());
-        r = new BulkShardRequest(null, new ShardId(index, "ignored", 0), RefreshPolicy.IMMEDIATE, new BulkItemRequest[count]);
+        r = new BulkShardRequest(new ShardId(index, "ignored", 0), RefreshPolicy.IMMEDIATE, new BulkItemRequest[count]);
         assertEquals("BulkShardRequest to [" + index + "] containing [" + count + "] requests and a refresh", r.toString());
-        r = new BulkShardRequest(null, new ShardId(index, "ignored", 0), RefreshPolicy.WAIT_UNTIL, new BulkItemRequest[count]);
+        r = new BulkShardRequest(new ShardId(index, "ignored", 0), RefreshPolicy.WAIT_UNTIL, new BulkItemRequest[count]);
         assertEquals("BulkShardRequest to [" + index + "] containing [" + count + "] requests blocking until refresh", r.toString());
     }
 }
diff --git a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java
index 6d9987394f9..4fa640b3adc 100644
--- a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java
+++ b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java
@@ -149,7 +149,7 @@ public class RetryTests extends ESTestCase {
         }

         @Override
-        public void onFailure(Throwable e) {
+        public void onFailure(Exception e) {
             this.lastFailure = e;
             latch.countDown();
         }
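BulkRequestTests above also tracks the `BytesReference` API cleanup threaded through this PR: `toUtf8()` becomes `utf8ToString()`, and byte-array comparisons via `toBytes()` give way to comparing the references directly. The before/after in isolation, with illustrative values:

```java
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;

final class BytesReferenceUsage {
    static void demo() {
        BytesReference source = new BytesArray("{\"field\":\"value\"}");
        // old: assertArrayEquals(source.toBytes(), new BytesArray(...).toBytes())
        boolean sameBytes = source.equals(new BytesArray("{\"field\":\"value\"}"));
        // old: source.toUtf8()
        String json = source.utf8ToString();
        assert sameBytes && json.startsWith("{");
    }
}
```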
diff --git a/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java b/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java
index 6ae7559ba62..7c39adc76f6 100644
--- a/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java
+++ b/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java
@@ -59,7 +59,7 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo;

 public class TransportBulkActionTookTests extends ESTestCase {

-    static private ThreadPool threadPool;
+    private static ThreadPool threadPool;
     private ClusterService clusterService;

     @BeforeClass
@@ -201,7 +201,7 @@ public class TransportBulkActionTookTests extends ESTestCase {
         }

         @Override
-        public void onFailure(Throwable e) {
+        public void onFailure(Exception e) {
         }
     });
diff --git a/core/src/test/java/org/elasticsearch/action/get/MultiGetShardRequestTests.java b/core/src/test/java/org/elasticsearch/action/get/MultiGetShardRequestTests.java
index 451ade62584..ef259463139 100644
--- a/core/src/test/java/org/elasticsearch/action/get/MultiGetShardRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/action/get/MultiGetShardRequestTests.java
@@ -70,7 +70,7 @@ public class MultiGetShardRequestTests extends ESTestCase {
         out.setVersion(randomVersion(random()));
         multiGetShardRequest.writeTo(out);

-        StreamInput in = StreamInput.wrap(out.bytes());
+        StreamInput in = out.bytes().streamInput();
         in.setVersion(out.getVersion());
         MultiGetShardRequest multiGetShardRequest2 = new MultiGetShardRequest();
         multiGetShardRequest2.readFrom(in);
diff --git a/core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java b/core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java
index 0bb28df406b..9adb51e6c44 100644
--- a/core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java
+++ b/core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java
@@ -111,7 +111,7 @@ public class BulkRequestModifierTests extends ESTestCase {
         }

         @Override
-        public void onFailure(Throwable e) {
+        public void onFailure(Exception e) {
         }
     });
@@ -157,7 +157,7 @@ public class BulkRequestModifierTests extends ESTestCase {
         }

         @Override
-        public void onFailure(Throwable e) {
+        public void onFailure(Exception e) {
         }

         public BulkResponse getResponse() {
diff --git a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java
index a62946bf0f6..3d1a1a1c69d 100644
--- a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java
+++ b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java
@@ -33,7 +33,7 @@ import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.VersionUtils;
@@ -78,7 +78,7 @@ public class IngestProxyActionFilterTests extends ESTestCase {
             if (i < ingestNodes) {
                 roles.add(DiscoveryNode.Role.INGEST);
             }
-            DiscoveryNode node = new DiscoveryNode(nodeId, nodeId, DummyTransportAddress.INSTANCE, attributes, roles, VersionUtils.randomVersion(random()));
+            DiscoveryNode node = new DiscoveryNode(nodeId, nodeId, LocalTransportAddress.buildUnique(), attributes, roles, VersionUtils.randomVersion(random()));
             builder.put(node);
             if (i == totalNodes - 1) {
                 localNode = node;
diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java
index 323a8c0aaa6..544e2932b44 100644
--- a/core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java
+++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java
@@ -45,7 +45,7 @@ public class SimulateDocumentSimpleResultTests extends ESTestCase {
         BytesStreamOutput out = new BytesStreamOutput();
         simulateDocumentBaseResult.writeTo(out);
-        StreamInput streamInput = StreamInput.wrap(out.bytes());
+        StreamInput streamInput = out.bytes().streamInput();
         SimulateDocumentBaseResult otherSimulateDocumentBaseResult = new SimulateDocumentBaseResult(streamInput);

         if (isFailure) {
diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java
index 1376ca4280e..576e8e01724 100644
--- a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java
+++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java
@@ -73,7 +73,7 @@ public class SimulatePipelineResponseTests extends ESTestCase {
         SimulatePipelineResponse response = new SimulatePipelineResponse(randomAsciiOfLengthBetween(1, 10), isVerbose, results);
         BytesStreamOutput out = new BytesStreamOutput();
         response.writeTo(out);
-        StreamInput streamInput = StreamInput.wrap(out.bytes());
+        StreamInput streamInput = out.bytes().streamInput();
         SimulatePipelineResponse otherResponse = new SimulatePipelineResponse();
         otherResponse.readFrom(streamInput);
diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java
index f612f36c9d6..ccf3a674944 100644
--- a/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java
+++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java
@@ -48,7 +48,7 @@ public class SimulateProcessorResultTests extends ESTestCase {
         BytesStreamOutput out = new BytesStreamOutput();
         simulateProcessorResult.writeTo(out);
-        StreamInput streamInput = StreamInput.wrap(out.bytes());
+        StreamInput streamInput = out.bytes().streamInput();
         SimulateProcessorResult otherSimulateProcessorResult = new SimulateProcessorResult(streamInput);
         assertThat(otherSimulateProcessorResult.getProcessorTag(), equalTo(simulateProcessorResult.getProcessorTag()));
         if (isFailure) {
diff --git a/core/src/test/java/org/elasticsearch/action/ingest/WritePipelineResponseTests.java b/core/src/test/java/org/elasticsearch/action/ingest/WritePipelineResponseTests.java
index 3f252c37072..00327603ba8 100644
--- a/core/src/test/java/org/elasticsearch/action/ingest/WritePipelineResponseTests.java
+++ b/core/src/test/java/org/elasticsearch/action/ingest/WritePipelineResponseTests.java
@@ -35,7 +35,7 @@ public class WritePipelineResponseTests extends ESTestCase {
         response = new WritePipelineResponse(isAcknowledged);
         BytesStreamOutput out = new BytesStreamOutput();
         response.writeTo(out);
-        StreamInput streamInput = StreamInput.wrap(out.bytes());
+        StreamInput streamInput = out.bytes().streamInput();
         WritePipelineResponse otherResponse = new WritePipelineResponse();
         otherResponse.readFrom(streamInput);

@@ -46,7 +46,7 @@ public class WritePipelineResponseTests extends ESTestCase {
         WritePipelineResponse response = new WritePipelineResponse();
         BytesStreamOutput out = new BytesStreamOutput();
         response.writeTo(out);
-        StreamInput streamInput = StreamInput.wrap(out.bytes());
+        StreamInput streamInput = out.bytes().streamInput();
         WritePipelineResponse otherResponse = new WritePipelineResponse();
         otherResponse.readFrom(streamInput);
diff --git a/core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java b/core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java
index a7ce842913d..b4908846e97 100644
--- a/core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java
+++ b/core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java
@@ -112,7 +112,7 @@ public class WriteableIngestDocumentTests extends ESTestCase {
         BytesStreamOutput out = new BytesStreamOutput();
         writeableIngestDocument.writeTo(out);
-        StreamInput streamInput = StreamInput.wrap(out.bytes());
+        StreamInput streamInput = out.bytes().streamInput();
         WriteableIngestDocument otherWriteableIngestDocument = new WriteableIngestDocument(streamInput);
         assertIngestDocument(otherWriteableIngestDocument.getIngestDocument(), writeableIngestDocument.getIngestDocument());
     }
diff --git a/core/src/test/java/org/elasticsearch/action/main/MainActionTests.java b/core/src/test/java/org/elasticsearch/action/main/MainActionTests.java
index 2bff71d3c40..a8c550e01c5 100644
--- a/core/src/test/java/org/elasticsearch/action/main/MainActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/main/MainActionTests.java
@@ -64,7 +64,7 @@ public class MainActionTests extends ESTestCase {
         BytesStreamOutput streamOutput = new BytesStreamOutput();
         mainResponse.writeTo(streamOutput);
         final MainResponse serialized = new MainResponse();
-        serialized.readFrom(new ByteBufferStreamInput(ByteBuffer.wrap(streamOutput.bytes().toBytes())));
+        serialized.readFrom(streamOutput.bytes().streamInput());

         assertThat(serialized.getNodeName(), equalTo(nodeName));
         assertThat(serialized.getClusterName(), equalTo(clusterName));
@@ -121,7 +121,7 @@ public class MainActionTests extends ESTestCase {
         }

         @Override
-        public void onFailure(Throwable e) {
+        public void onFailure(Exception e) {
             logger.error("unexpected error", e);
         }
     });
diff --git a/core/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java b/core/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java
index d5ed5302b97..d656e0f62a9 100644
--- a/core/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java
+++ b/core/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java
@@ -38,7 +38,7 @@ public class IndicesOptionsTests extends ESTestCase {
             output.setVersion(outputVersion);
             indicesOptions.writeIndicesOptions(output);

-            StreamInput streamInput = StreamInput.wrap(output.bytes());
+            StreamInput streamInput = output.bytes().streamInput();
             streamInput.setVersion(randomVersion(random()));
             IndicesOptions indicesOptions2 = IndicesOptions.readIndicesOptions(streamInput);
diff --git a/core/src/test/java/org/elasticsearch/action/support/ListenableActionFutureTests.java b/core/src/test/java/org/elasticsearch/action/support/ListenableActionFutureTests.java
index 80492f0be61..8169a674bed 100644
--- a/core/src/test/java/org/elasticsearch/action/support/ListenableActionFutureTests.java
+++ b/core/src/test/java/org/elasticsearch/action/support/ListenableActionFutureTests.java
@@ -45,15 +45,15 @@ public class ListenableActionFutureTests extends ESTestCase {
         }

         @Override
-        public void onFailure(Throwable e) {
+        public void onFailure(Exception e) {
             error.set(e);
             listenerCalled.countDown();
         }
     });
     Thread networkThread = new Thread(new AbstractRunnable() {
         @Override
-        public void onFailure(Throwable t) {
-            error.set(t);
+        public void onFailure(Exception e) {
+            error.set(e);
             listenerCalled.countDown();
         }
diff --git a/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java b/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java
index 00068c05efe..bbf1d2f1942 100644
--- a/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java
+++ b/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java
@@ -41,6 +41,8 @@ import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Function;
+import java.util.stream.IntStream;

 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.instanceOf;
@@ -102,8 +104,8 @@ public class TransportActionFilterChainTests extends ESTestCase {
         try {
             assertThat(future.get(), notNullValue());
             assertThat("shouldn't get here if an error is expected", errorExpected, equalTo(false));
-        } catch(Throwable t) {
-            assertThat("shouldn't get here if an error is not expected " + t.getMessage(), errorExpected, equalTo(true));
+        } catch (ExecutionException e) {
+            assertThat("shouldn't get here if an error is not expected " + e.getMessage(), errorExpected, equalTo(true));
         }

         List<TestFilter> testFiltersByLastExecution = new ArrayList<>();
@@ -182,8 +184,8 @@ public class TransportActionFilterChainTests extends ESTestCase {
         try {
             assertThat(future.get(), notNullValue());
             assertThat("shouldn't get here if an error is expected", errorExpected, equalTo(false));
-        } catch(Throwable t) {
-            assertThat("shouldn't get here if an error is not expected " + t.getMessage(), errorExpected, equalTo(true));
+        } catch(ExecutionException e) {
+            assertThat("shouldn't get here if an error is not expected " + e.getMessage(), errorExpected, equalTo(true));
         }

         List<TestFilter> testFiltersByLastExecution = new ArrayList<>();
@@ -252,7 +254,7 @@ public class TransportActionFilterChainTests extends ESTestCase {
         }

         @Override
-        public void onFailure(Throwable e) {
+        public void onFailure(Exception e) {
             failures.add(e);
             latch.countDown();
         }
@@ -309,7 +311,7 @@ public class TransportActionFilterChainTests extends ESTestCase {
         }

         @Override
-        public void onFailure(Throwable e) {
+        public void onFailure(Exception e) {
             failures.add(e);
             latch.countDown();
         }
@@ -445,12 +447,12 @@ public class TransportActionFilterChainTests extends ESTestCase {
         }
     }

-    private static interface RequestCallback {
+    private interface RequestCallback {
         <Request extends ActionRequest<Request>, Response extends ActionResponse> void execute(Task task, String action,
             Request request, ActionListener<Response> listener, ActionFilterChain<Request, Response> actionFilterChain);
     }

-    private static interface ResponseCallback {
+    private interface ResponseCallback {
         <Response extends ActionResponse> void execute(String action, Response response, ActionListener<Response> listener,
             ActionFilterChain<?, Response> chain);
     }
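The filter-chain test now catches `ExecutionException` from `future.get()` instead of a blanket `Throwable`: the future wraps the action's failure as its cause, and `Error`s keep propagating. A sketch of the unwrap, with illustrative naming:

```java
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

final class Futures {
    // Rethrow the action's real failure rather than the ExecutionException wrapper.
    static <T> T unwrap(Future<T> future) throws Exception {
        try {
            return future.get();
        } catch (ExecutionException e) {
            Throwable cause = e.getCause();
            if (cause instanceof Exception) {
                throw (Exception) cause;
            }
            throw e; // an Error-wrapping case; leave the wrapper intact
        }
    }
}
```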
org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; @@ -247,7 +247,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { } static DiscoveryNode newNode(int nodeId) { - return new DiscoveryNode("node_" + nodeId, DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + return new DiscoveryNode("node_" + nodeId, LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); } @AfterClass @@ -491,7 +491,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { } @Override - public void sendResponse(Throwable error) throws IOException { + public void sendResponse(Exception exception) throws IOException { } @Override diff --git a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java index 32fe6b1e408..5d01aa369b0 100644 --- a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java @@ -37,7 +37,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.MasterNotDiscoveredException; @@ -89,9 +89,9 @@ public class TransportMasterNodeActionTests extends ESTestCase { transportService = new TransportService(clusterService.getSettings(), transport, threadPool); transportService.start(); transportService.acceptIncomingRequests(); - localNode = new DiscoveryNode("local_node", DummyTransportAddress.INSTANCE, Collections.emptyMap(), + localNode = new DiscoveryNode("local_node", LocalTransportAddress.buildUnique(), Collections.emptyMap(), Collections.singleton(DiscoveryNode.Role.MASTER), Version.CURRENT); - remoteNode = new DiscoveryNode("remote_node", DummyTransportAddress.INSTANCE, Collections.emptyMap(), + remoteNode = new DiscoveryNode("remote_node", LocalTransportAddress.buildUnique(), Collections.emptyMap(), Collections.singleton(DiscoveryNode.Role.MASTER), Version.CURRENT); allNodes = new DiscoveryNode[]{localNode, remoteNode}; } @@ -136,7 +136,7 @@ public class TransportMasterNodeActionTests extends ESTestCase { @Override protected void doExecute(Task task, final Request request, ActionListener listener) { // remove unneeded threading by wrapping listener with SAME to prevent super.doExecute from wrapping it with LISTENER - super.doExecute(task, request, new ThreadedActionListener<>(logger, threadPool, ThreadPool.Names.SAME, listener)); + super.doExecute(task, request, new ThreadedActionListener<>(logger, threadPool, ThreadPool.Names.SAME, listener, false)); } @Override @@ -167,7 +167,7 @@ public class TransportMasterNodeActionTests extends ESTestCase { Request request = new Request(); PlainActionFuture listener = new PlainActionFuture<>(); - final Throwable exception = new Throwable(); + final Exception exception = new Exception(); final Response response = new Response(); setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, 
allNodes)); @@ -244,7 +244,7 @@ public class TransportMasterNodeActionTests extends ESTestCase { Request request = new Request(); PlainActionFuture listener = new PlainActionFuture<>(); - setState(clusterService, ClusterStateCreationUtils.state(localNode, randomFrom(null, localNode, remoteNode), allNodes)); + setState(clusterService, ClusterStateCreationUtils.state(localNode, randomFrom(localNode, remoteNode, null), allNodes)); new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) { @Override @@ -342,7 +342,7 @@ public class TransportMasterNodeActionTests extends ESTestCase { protected void masterOperation(Request request, ClusterState state, ActionListener listener) throws Exception { // The other node has become master, simulate failures of this node while publishing cluster state through ZenDiscovery setState(clusterService, ClusterStateCreationUtils.state(localNode, remoteNode, allNodes)); - Throwable failure = randomBoolean() + Exception failure = randomBoolean() ? new Discovery.FailedToCommitClusterStateException("Fake error") : new NotMasterException("Fake error"); listener.onFailure(failure); diff --git a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java index 626850fd119..ae8ea4a0b95 100644 --- a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java @@ -32,7 +32,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.CapturingTransport; import org.elasticsearch.threadpool.TestThreadPool; @@ -96,7 +96,7 @@ public class TransportNodesActionTests extends ESTestCase { TestNodesRequest request = new TestNodesRequest(finalNodesIds); action.new AsyncAction(null, request, new PlainActionFuture<>()).start(); Map> capturedRequests = transport.getCapturedRequestsByTargetNodeAndClear(); - assertEquals(clusterService.state().nodes().resolveNodesIds(finalNodesIds).length, capturedRequests.size()); + assertEquals(clusterService.state().nodes().resolveNodes(finalNodesIds).length, capturedRequests.size()); } public void testNewResponseNullArray() { @@ -129,9 +129,9 @@ public class TransportNodesActionTests extends ESTestCase { assertTrue(failures.containsAll(response.failures())); } - public void testFiltering() throws Exception { - TransportNodesAction action = getFilteringTestTransportNodesAction(transportService); - TestNodesRequest request = new TestNodesRequest(); + public void testCustomResolving() throws Exception { + TransportNodesAction action = getDataNodesOnlyTransportNodesAction(transportService); + TestNodesRequest request = new TestNodesRequest(randomBoolean() ? 
null : generateRandomStringArray(10, 5, false, true)); PlainActionFuture listener = new PlainActionFuture<>(); action.new AsyncAction(null, request, listener).start(); Map> capturedRequests = transport.getCapturedRequestsByTargetNodeAndClear(); @@ -221,8 +221,8 @@ public class TransportNodesActionTests extends ESTestCase { ); } - public FilteringTestTransportNodesAction getFilteringTestTransportNodesAction(TransportService transportService) { - return new FilteringTestTransportNodesAction( + public DataNodesOnlyTransportNodesAction getDataNodesOnlyTransportNodesAction(TransportService transportService) { + return new DataNodesOnlyTransportNodesAction( Settings.EMPTY, THREAD_POOL, clusterService, @@ -236,7 +236,7 @@ public class TransportNodesActionTests extends ESTestCase { private static DiscoveryNode newNode(int nodeId, Map attributes, Set roles) { String node = "node_" + nodeId; - return new DiscoveryNode(node, node, DummyTransportAddress.INSTANCE, attributes, roles, Version.CURRENT); + return new DiscoveryNode(node, node, LocalTransportAddress.buildUnique(), attributes, roles, Version.CURRENT); } private static class TestTransportNodesAction @@ -276,18 +276,18 @@ public class TransportNodesActionTests extends ESTestCase { } } - private static class FilteringTestTransportNodesAction + private static class DataNodesOnlyTransportNodesAction extends TestTransportNodesAction { - FilteringTestTransportNodesAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService + DataNodesOnlyTransportNodesAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, Supplier request, Supplier nodeRequest, String nodeExecutor) { super(settings, threadPool, clusterService, transportService, actionFilters, request, nodeRequest, nodeExecutor); } @Override - protected String[] filterNodeIds(DiscoveryNodes nodes, String[] nodesIds) { - return nodes.getDataNodes().keys().toArray(String.class); + protected void resolveRequest(TestNodesRequest request, ClusterState clusterState) { + request.setConcreteNodes(clusterState.nodes().getDataNodes().values().toArray(DiscoveryNode.class)); } }
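The hunk above swaps the filterNodeIds hook for resolveRequest, which pins the concrete target nodes on the request before fan-out rather than filtering node ids afterwards. A sketch of the new override as it would sit inside a TransportNodesAction subclass (surrounding scaffolding elided; TestNodesRequest and setConcreteNodes are the names the hunk itself uses):

    // Resolve up front: attach the chosen DiscoveryNodes to the request itself.
    @Override
    protected void resolveRequest(TestNodesRequest request, ClusterState clusterState) {
        // getDataNodes() maps node id to DiscoveryNode; keep only the values.
        DiscoveryNode[] dataNodes =
                clusterState.nodes().getDataNodes().values().toArray(DiscoveryNode.class);
        request.setConcreteNodes(dataNodes);
    }

The companion change, visible in newNode above and repeated below, replaces the removed DummyTransportAddress.INSTANCE singleton with LocalTransportAddress.buildUnique(), so every test node gets a distinct address.

diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java b/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java index dc40fda3f8e..7496bb85faf 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java @@ -34,7 +34,7 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.index.shard.ShardId; import java.util.Arrays; @@ -220,7 +220,6 @@ public class ClusterStateCreationUtils { * Creates a cluster state with no index */ public static ClusterState stateWithNoShard() { - int numberOfNodes = 2; DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); discoBuilder.localNodeId(newNode(0).getId()); discoBuilder.masterNodeId(newNode(1).getId()); @@ -256,11 +255,11 @@ public class ClusterStateCreationUtils { } private static DiscoveryNode newNode(int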
nodeId) { - return new DiscoveryNode("node_" + nodeId, DummyTransportAddress.INSTANCE, Collections.emptyMap(), + return new DiscoveryNode("node_" + nodeId, LocalTransportAddress.buildUnique(), Collections.emptyMap(), new HashSet<>(Arrays.asList(DiscoveryNode.Role.values())), Version.CURRENT); } - static private String selectAndRemove(Set strings) { + private static String selectAndRemove(Set strings) { String selection = randomFrom(strings.toArray(new String[strings.size()])); strings.remove(selection); return selection; diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java index 1ef951e1ba8..f43f39c732a 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java @@ -80,11 +80,11 @@ public class ReplicationOperationTests extends ESTestCase { final Set expectedReplicas = getExpectedReplicas(shardId, state); - final Map expectedFailures = new HashMap<>(); + final Map expectedFailures = new HashMap<>(); final Set expectedFailedShards = new HashSet<>(); for (ShardRouting replica : expectedReplicas) { if (randomBoolean()) { - Throwable t; + Exception t; boolean criticalFailure = randomBoolean(); if (criticalFailure) { t = new CorruptIndexException("simulated", (String) null); @@ -169,7 +169,7 @@ public class ReplicationOperationTests extends ESTestCase { final Set expectedReplicas = getExpectedReplicas(shardId, state); - final Map expectedFailures = new HashMap<>(); + final Map expectedFailures = new HashMap<>(); final ShardRouting failedReplica = randomFrom(new ArrayList<>(expectedReplicas)); expectedFailures.put(failedReplica, new CorruptIndexException("simulated", (String) null)); @@ -178,9 +178,9 @@ public class ReplicationOperationTests extends ESTestCase { final ClusterState finalState = state; final TestReplicaProxy replicasProxy = new TestReplicaProxy(expectedFailures) { @Override - public void failShard(ShardRouting replica, ShardRouting primary, String message, Throwable throwable, - Runnable onSuccess, Consumer onPrimaryDemoted, - Consumer onIgnoredFailure) { + public void failShard(ShardRouting replica, ShardRouting primary, String message, Exception exception, + Runnable onSuccess, Consumer onPrimaryDemoted, + Consumer onIgnoredFailure) { assertThat(replica, equalTo(failedReplica)); onPrimaryDemoted.accept(new ElasticsearchException("the king is dead")); } @@ -188,7 +188,7 @@ public class ReplicationOperationTests extends ESTestCase { AtomicBoolean primaryFailed = new AtomicBoolean(); final TestPrimary primary = new TestPrimary(primaryShard, primaryTerm) { @Override - public void failShard(String message, Throwable throwable) { + public void failShard(String message, Exception exception) { assertTrue(primaryFailed.compareAndSet(false, true)); } }; @@ -382,8 +382,8 @@ public class ReplicationOperationTests extends ESTestCase { } @Override - public void failShard(String message, Throwable throwable) { - throw new AssertionError("should shouldn't be failed with [" + message + "]", throwable); + public void failShard(String message, Exception exception) { + throw new AssertionError("shard shouldn't be failed with [" + message + "]", exception); } @Override @@ -451,7 +451,7 @@ public class ReplicationOperationTests extends ESTestCase { static class TestReplicaProxy implements
ReplicationOperation.Replicas { - final Map opFailures; + final Map opFailures; final Set failedReplicas = ConcurrentCollections.newConcurrentSet(); @@ -461,7 +461,7 @@ public class ReplicationOperationTests extends ESTestCase { this(Collections.emptyMap()); } - TestReplicaProxy(Map opFailures) { + TestReplicaProxy(Map opFailures) { this.opFailures = opFailures; } @@ -480,8 +480,8 @@ public class ReplicationOperationTests extends ESTestCase { } @Override - public void failShard(ShardRouting replica, ShardRouting primary, String message, Throwable throwable, Runnable onSuccess, - Consumer onPrimaryDemoted, Consumer onIgnoredFailure) { + public void failShard(ShardRouting replica, ShardRouting primary, String message, Exception exception, Runnable onSuccess, + Consumer onPrimaryDemoted, Consumer onIgnoredFailure) { if (failedReplicas.add(replica) == false) { fail("replica [" + replica + "] was failed twice"); } diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index 4a5d149005e..6e7b71f4d83 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -49,6 +49,7 @@ import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.engine.EngineClosedException; +import org.elasticsearch.index.seqno.SequenceNumbersService; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardClosedException; import org.elasticsearch.index.shard.ShardId; @@ -391,7 +392,15 @@ public class TransportReplicationActionTests extends ESTestCase { PlainActionFuture listener = new PlainActionFuture<>(); ReplicationTask task = maybeTask(); AtomicBoolean executed = new AtomicBoolean(); - Action.PrimaryOperationTransportHandler primaryPhase = action.new PrimaryOperationTransportHandler() { + + ShardRouting primaryShard = state.getRoutingTable().shardRoutingTable(shardId).primaryShard(); + boolean executeOnPrimary = true; + // whether shard has been marked as relocated already (i.e. relocation completed) + if (primaryShard.relocating() && randomBoolean()) { + isRelocated.set(true); + executeOnPrimary = false; + } + action.new AsyncPrimaryAction(request, createTransportChannel(listener), task) { @Override protected ReplicationOperation createReplicatedOperation(Request request, ActionListener actionListener, Action.PrimaryShardReference primaryShardReference, @@ -404,15 +413,7 @@ public class TransportReplicationActionTests extends ESTestCase { } }; } - }; - ShardRouting primaryShard = state.getRoutingTable().shardRoutingTable(shardId).primaryShard(); - boolean executeOnPrimary = true; - // whether shard has been marked as relocated already (i.e. 
relocation completed) - if (primaryShard.relocating() && randomBoolean()) { - isRelocated.set(true); - executeOnPrimary = false; - } - primaryPhase.messageReceived(request, createTransportChannel(listener), task); + }.run(); if (executeOnPrimary) { assertTrue(executed.get()); assertTrue(listener.isDone()); @@ -446,7 +447,7 @@ public class TransportReplicationActionTests extends ESTestCase { PlainActionFuture listener = new PlainActionFuture<>(); ReplicationTask task = maybeTask(); AtomicBoolean executed = new AtomicBoolean(); - Action.PrimaryOperationTransportHandler primaryPhase = action.new PrimaryOperationTransportHandler() { + action.new AsyncPrimaryAction(request, createTransportChannel(listener), task) { @Override protected ReplicationOperation createReplicatedOperation(Request request, ActionListener actionListener, Action.PrimaryShardReference primaryShardReference, @@ -459,8 +460,7 @@ public class TransportReplicationActionTests extends ESTestCase { } }; } - }; - primaryPhase.messageReceived(request, createTransportChannel(listener), task); + }.run(); assertThat(executed.get(), equalTo(true)); assertPhase(task, "finished"); } @@ -582,16 +582,18 @@ public class TransportReplicationActionTests extends ESTestCase { metaData.put(IndexMetaData.builder(metaData.get(index)).settings(settings)); state = ClusterState.builder(state).metaData(metaData).build(); setState(clusterService, state); - Action.PrimaryOperationTransportHandler primaryPhase = action.new PrimaryOperationTransportHandler() { + AtomicBoolean executed = new AtomicBoolean(); + action.new AsyncPrimaryAction(new Request(shardId), createTransportChannel(new PlainActionFuture<>()), null) { @Override protected ReplicationOperation createReplicatedOperation(Request request, ActionListener actionListener, Action.PrimaryShardReference primaryShardReference, boolean executeOnReplicas) { assertFalse(executeOnReplicas); + assertFalse(executed.getAndSet(true)); return new NoopReplicationOperation(request, actionListener); } - }; - primaryPhase.messageReceived(new Request(shardId), createTransportChannel(new PlainActionFuture<>()), null); + }.run(); + assertThat(executed.get(), equalTo(true)); } public void testSeqNoIsSetOnPrimary() throws Exception { @@ -620,8 +622,8 @@ public class TransportReplicationActionTests extends ESTestCase { Action action = new Action(Settings.EMPTY, "testSeqNoIsSetOnPrimary", transportService, clusterService, threadPool) { @Override - protected PrimaryShardReference getPrimaryShardReference(ShardId shardId) { - return new PrimaryShardReference(shard, releasable); + protected void acquirePrimaryShardReference(ShardId shardId, ActionListener onReferenceAcquired) { + onReferenceAcquired.onResponse(new PrimaryShardReference(shard, releasable)); } }; @@ -646,17 +648,16 @@ public class TransportReplicationActionTests extends ESTestCase { final boolean throwExceptionOnCreation = i == 1; final boolean throwExceptionOnRun = i == 2; final boolean respondWithError = i == 3; - Action.PrimaryOperationTransportHandler primaryPhase = action.new PrimaryOperationTransportHandler() { - + action.new AsyncPrimaryAction(request, createTransportChannel(listener), task) { @Override protected ReplicationOperation createReplicatedOperation(Request request, - ActionListener listener, Action.PrimaryShardReference primaryShardReference, + ActionListener actionListener, Action.PrimaryShardReference primaryShardReference, boolean executeOnReplicas) { assertIndexShardCounter(1); if (throwExceptionOnCreation) { throw new 
ElasticsearchException("simulated exception, during createReplicatedOperation"); } - return new NoopReplicationOperation(request, listener) { + return new NoopReplicationOperation(request, actionListener) { @Override public void execute() throws Exception { assertIndexShardCounter(1); @@ -671,18 +672,7 @@ public class TransportReplicationActionTests extends ESTestCase { } }; } - }; - try { - primaryPhase.messageReceived(request, createTransportChannel(listener), task); - } catch (ElasticsearchException e) { - if (throwExceptionOnCreation || throwExceptionOnRun) { - assertThat(e.getMessage(), containsString("simulated")); - assertIndexShardCounter(0); - return; // early terminate - } else { - throw e; - } - } + }.run(); assertIndexShardCounter(0); assertTrue(listener.isDone()); assertPhase(task, "finished"); @@ -690,7 +680,7 @@ public class TransportReplicationActionTests extends ESTestCase { try { listener.get(); } catch (ExecutionException e) { - if (respondWithError) { + if (throwExceptionOnCreation || throwExceptionOnRun || respondWithError) { Throwable cause = e.getCause(); assertThat(cause, instanceOf(ElasticsearchException.class)); assertThat(cause.getMessage(), containsString("simulated")); @@ -829,16 +819,16 @@ public class TransportReplicationActionTests extends ESTestCase { } @Override - protected PrimaryShardReference getPrimaryShardReference(ShardId shardId) { + protected void acquirePrimaryShardReference(ShardId shardId, ActionListener onReferenceAcquired) { count.incrementAndGet(); - return new PrimaryShardReference(null, null) { + PrimaryShardReference primaryShardReference = new PrimaryShardReference(null, null) { @Override public boolean isRelocated() { return isRelocated.get(); } @Override - public void failShard(String reason, @Nullable Throwable e) { + public void failShard(String reason, @Nullable Exception e) { throw new UnsupportedOperationException(); } @@ -854,13 +844,35 @@ public class TransportReplicationActionTests extends ESTestCase { public void close() { count.decrementAndGet(); } - }; + + onReferenceAcquired.onResponse(primaryShardReference); } - protected Releasable acquireReplicaOperationLock(ShardId shardId, long primaryTerm) { + @Override + protected void acquireReplicaOperationLock(ShardId shardId, long primaryTerm, ActionListener onLockAcquired) { count.incrementAndGet(); - return count::decrementAndGet; + + ShardReference shardReference = new ShardReference(null, null) { + @Override + public long getLocalCheckpoint() { + return SequenceNumbersService.NO_OPS_PERFORMED; + } + + @Override + public ShardRouting routingEntry() { + ShardRouting shardRouting = clusterService.state().getRoutingTable().shardRoutingTable(shardId).replicaShards().get(0); + assert shardRouting != null; + return shardRouting; + } + + @Override + public void close() { + count.decrementAndGet(); + } + }; + + onLockAcquired.onResponse(shardReference); } } @@ -907,9 +919,9 @@ public class TransportReplicationActionTests extends ESTestCase { } @Override - public void sendResponse(Throwable error) throws IOException { - consumer.accept(error); - listener.onFailure(error); + public void sendResponse(Exception exception) throws IOException { + consumer.accept(exception); + listener.onFailure(exception); } @Override diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java index 7b312959631..80e689743fd 100644 --- 
a/core/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java @@ -179,7 +179,7 @@ public class TransportWriteActionTests extends ESTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { throw new RuntimeException(e); } } diff --git a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java index c26d376b587..37abc4d5eed 100644 --- a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java @@ -179,9 +179,9 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { action.new AsyncSingleAction(request, listener).start(); listener.get(); fail("expected ClusterBlockException"); - } catch (Throwable t) { - if (ExceptionsHelper.unwrap(t, ClusterBlockException.class) == null) { - logger.info("expected ClusterBlockException but got ", t); + } catch (Exception e) { + if (ExceptionsHelper.unwrap(e, ClusterBlockException.class) == null) { + logger.info("expected ClusterBlockException but got ", e); fail("expected ClusterBlockException"); } } @@ -317,9 +317,9 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { assertThat(transport.capturedRequests().length, equalTo(0)); try { listener.get(); - } catch (Throwable t) { - if (ExceptionsHelper.unwrap(t, IllegalStateException.class) == null) { - logger.info("expected IllegalStateException but got ", t); + } catch (Exception e) { + if (ExceptionsHelper.unwrap(e, IllegalStateException.class) == null) { + logger.info("expected IllegalStateException but got ", e); fail("expected and IllegalStateException"); } } diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java index 208945a6179..d9f351120b2 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -68,10 +68,10 @@ import static org.hamcrest.Matchers.equalTo; public abstract class AbstractTermVectorsTestCase extends ESIntegTestCase { protected static class TestFieldSetting { - final public String name; - final public boolean storedOffset; - final public boolean storedPayloads; - final public boolean storedPositions; + public final String name; + public final boolean storedOffset; + public final boolean storedPayloads; + public final boolean storedPositions; public TestFieldSetting(String name, boolean storedOffset, boolean storedPayloads, boolean storedPositions) { this.name = name; @@ -124,9 +124,9 @@ public abstract class AbstractTermVectorsTestCase extends ESIntegTestCase { } protected static class TestDoc { - final public String id; - final public TestFieldSetting[] fieldSettings; - final public String[] fieldContent; + public final String id; + public final TestFieldSetting[] fieldSettings; + public final String[] fieldContent; public String index = "test"; public String alias = "alias"; public String type = "type1"; @@ -163,11 +163,11 @@ 
public abstract class AbstractTermVectorsTestCase extends ESIntegTestCase { } protected static class TestConfig { - final public TestDoc doc; - final public String[] selectedFields; - final public boolean requestPositions; - final public boolean requestOffsets; - final public boolean requestPayloads; + public final TestDoc doc; + public final String[] selectedFields; + public final boolean requestPositions; + public final boolean requestOffsets; + public final boolean requestPayloads; public Class expectedException = null; public TestConfig(TestDoc doc, String[] selectedFields, boolean requestPositions, boolean requestOffsets, boolean requestPayloads) { diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java index 37a1bc92e9c..1611c63d2ba 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java @@ -140,7 +140,7 @@ public class GetTermVectorsCheckDocFreqIT extends ESIntegTestCase { xBuilder.startObject(); response.toXContent(xBuilder, null); xBuilder.endObject(); - String utf8 = xBuilder.bytes().toUtf8().replaceFirst("\"took\":\\d+,", "");; + String utf8 = xBuilder.bytes().utf8ToString().replaceFirst("\"took\":\\d+,", "");; String expectedString = "{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\"" + i + "\",\"_version\":1,\"found\":true,\"term_vectors\":{\"field\":{\"terms\":{\"brown\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":2,\"start_offset\":10,\"end_offset\":15,\"payload\":\"d29yZA==\"}]},\"dog\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":8,\"start_offset\":40,\"end_offset\":43,\"payload\":\"d29yZA==\"}]},\"fox\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":3,\"start_offset\":16,\"end_offset\":19,\"payload\":\"d29yZA==\"}]},\"jumps\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":4,\"start_offset\":20,\"end_offset\":25,\"payload\":\"d29yZA==\"}]},\"lazy\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":7,\"start_offset\":35,\"end_offset\":39,\"payload\":\"d29yZA==\"}]},\"over\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":5,\"start_offset\":26,\"end_offset\":30,\"payload\":\"d29yZA==\"}]},\"quick\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":1,\"start_offset\":4,\"end_offset\":9,\"payload\":\"d29yZA==\"}]},\"the\":{\"doc_freq\":15,\"ttf\":30,\"term_freq\":2,\"tokens\":[{\"position\":0,\"start_offset\":0,\"end_offset\":3,\"payload\":\"d29yZA==\"},{\"position\":6,\"start_offset\":31,\"end_offset\":34,\"payload\":\"d29yZA==\"}]}}}}}"; @@ -196,7 +196,7 @@ public class GetTermVectorsCheckDocFreqIT extends ESIntegTestCase { xBuilder.startObject(); response.toXContent(xBuilder, null); xBuilder.endObject(); - String utf8 = xBuilder.bytes().toUtf8().replaceFirst("\"took\":\\d+,", "");; + String utf8 = xBuilder.bytes().utf8ToString().replaceFirst("\"took\":\\d+,", "");; String expectedString = "{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\"" + i + 
"\",\"_version\":1,\"found\":true,\"term_vectors\":{\"field\":{\"field_statistics\":{\"sum_doc_freq\":120,\"doc_count\":15,\"sum_ttf\":135},\"terms\":{\"brown\":{\"term_freq\":1,\"tokens\":[{\"position\":2,\"start_offset\":10,\"end_offset\":15,\"payload\":\"d29yZA==\"}]},\"dog\":{\"term_freq\":1,\"tokens\":[{\"position\":8,\"start_offset\":40,\"end_offset\":43,\"payload\":\"d29yZA==\"}]},\"fox\":{\"term_freq\":1,\"tokens\":[{\"position\":3,\"start_offset\":16,\"end_offset\":19,\"payload\":\"d29yZA==\"}]},\"jumps\":{\"term_freq\":1,\"tokens\":[{\"position\":4,\"start_offset\":20,\"end_offset\":25,\"payload\":\"d29yZA==\"}]},\"lazy\":{\"term_freq\":1,\"tokens\":[{\"position\":7,\"start_offset\":35,\"end_offset\":39,\"payload\":\"d29yZA==\"}]},\"over\":{\"term_freq\":1,\"tokens\":[{\"position\":5,\"start_offset\":26,\"end_offset\":30,\"payload\":\"d29yZA==\"}]},\"quick\":{\"term_freq\":1,\"tokens\":[{\"position\":1,\"start_offset\":4,\"end_offset\":9,\"payload\":\"d29yZA==\"}]},\"the\":{\"term_freq\":2,\"tokens\":[{\"position\":0,\"start_offset\":0,\"end_offset\":3,\"payload\":\"d29yZA==\"},{\"position\":6,\"start_offset\":31,\"end_offset\":34,\"payload\":\"d29yZA==\"}]}}}}}"; @@ -255,7 +255,7 @@ public class GetTermVectorsCheckDocFreqIT extends ESIntegTestCase { xBuilder.startObject(); response.toXContent(xBuilder, ToXContent.EMPTY_PARAMS); xBuilder.endObject(); - String utf8 = xBuilder.bytes().toUtf8().replaceFirst("\"took\":\\d+,", "");; + String utf8 = xBuilder.bytes().utf8ToString().replaceFirst("\"took\":\\d+,", "");; String expectedString = "{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\"" + i + "\",\"_version\":1,\"found\":true,\"term_vectors\":{\"field\":{\"field_statistics\":{\"sum_doc_freq\":120,\"doc_count\":15,\"sum_ttf\":135},\"terms\":{\"brown\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":2,\"start_offset\":10,\"end_offset\":15,\"payload\":\"d29yZA==\"}]},\"dog\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":8,\"start_offset\":40,\"end_offset\":43,\"payload\":\"d29yZA==\"}]},\"fox\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":3,\"start_offset\":16,\"end_offset\":19,\"payload\":\"d29yZA==\"}]},\"jumps\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":4,\"start_offset\":20,\"end_offset\":25,\"payload\":\"d29yZA==\"}]},\"lazy\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":7,\"start_offset\":35,\"end_offset\":39,\"payload\":\"d29yZA==\"}]},\"over\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":5,\"start_offset\":26,\"end_offset\":30,\"payload\":\"d29yZA==\"}]},\"quick\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":1,\"start_offset\":4,\"end_offset\":9,\"payload\":\"d29yZA==\"}]},\"the\":{\"doc_freq\":15,\"ttf\":30,\"term_freq\":2,\"tokens\":[{\"position\":0,\"start_offset\":0,\"end_offset\":3,\"payload\":\"d29yZA==\"},{\"position\":6,\"start_offset\":31,\"end_offset\":34,\"payload\":\"d29yZA==\"}]}}}}}"; diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java index 12af9f8a2c2..d8fd7916b5a 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java @@ -39,7 +39,6 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.mapper.FieldMapper; -import org.hamcrest.Matcher; import java.io.IOException; import java.util.ArrayList; @@ -55,7 +54,6 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -391,19 +389,15 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase { TestConfig[] testConfigs = generateTestConfigs(20, testDocs, testFieldSettings); for (TestConfig test : testConfigs) { - try { - TermVectorsRequestBuilder request = getRequestForConfig(test); - if (test.expectedException != null) { - assertThrows(request, test.expectedException); - continue; - } - - TermVectorsResponse response = request.get(); - Fields luceneTermVectors = getTermVectorsFromLucene(directoryReader, test.doc); - validateResponse(response, luceneTermVectors, test); - } catch (Throwable t) { - throw new Exception("Test exception while running " + test.toString(), t); + TermVectorsRequestBuilder request = getRequestForConfig(test); + if (test.expectedException != null) { + assertThrows(request, test.expectedException); + continue; } + + TermVectorsResponse response = request.get(); + Fields luceneTermVectors = getTermVectorsFromLucene(directoryReader, test.doc); + validateResponse(response, luceneTermVectors, test); } } @@ -963,21 +957,6 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase { return randomBoolean() ? 
"test" : "alias"; } - private Map getFieldStatistics(Map stats, String fieldName) throws IOException { - return (Map) ((Map) stats.get(fieldName)).get("field_statistics"); - } - - private Map getTermStatistics(Map stats, String fieldName, String term) { - return (Map) ((Map) ((Map) stats.get(fieldName)).get("terms")).get(term); - } - - private Matcher equalOrLessThanTo(Integer value, boolean isEqual) { - if (isEqual) { - return equalTo(value); - } - return lessThan(value); - } - public void testTermVectorsWithVersion() { assertAcked(prepareCreate("test").addAlias(new Alias("alias")) .setSettings(Settings.builder().put("index.refresh_interval", -1))); diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/MultiTermVectorsIT.java b/core/src/test/java/org/elasticsearch/action/termvectors/MultiTermVectorsIT.java index 57a89c82cc8..5ed4f3252d5 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/MultiTermVectorsIT.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/MultiTermVectorsIT.java @@ -56,21 +56,16 @@ public class MultiTermVectorsIT extends AbstractTermVectorsTestCase { for (int i = 0; i < testConfigs.length; i++) { TestConfig test = testConfigs[i]; - try { - MultiTermVectorsItemResponse item = responseItems[i]; - if (test.expectedException != null) { - assertTrue(item.isFailed()); - continue; - } else if (item.isFailed()) { - fail(item.getFailure().getCause().getMessage()); - } - Fields luceneTermVectors = getTermVectorsFromLucene(directoryReader, test.doc); - validateResponse(item.getResponse(), luceneTermVectors, test); - } catch (Throwable t) { - throw new Exception("Test exception while running " + test.toString(), t); + MultiTermVectorsItemResponse item = responseItems[i]; + if (test.expectedException != null) { + assertTrue(item.isFailed()); + continue; + } else if (item.isFailed()) { + fail(item.getFailure().getCause().getMessage()); } + Fields luceneTermVectors = getTermVectorsFromLucene(directoryReader, test.doc); + validateResponse(item.getResponse(), luceneTermVectors, test); } - } public void testMissingIndexThrowsMissingIndex() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index 597a2a4db39..d105a4bf63b 100644 --- a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -135,7 +135,7 @@ public class UpdateRequestTests extends ESTestCase { TimeValue providedTTLValue = TimeValue.parseTimeValue(randomTimeValue(), null, "ttl"); Settings settings = settings(Version.CURRENT).build(); - UpdateHelper updateHelper = new UpdateHelper(settings, null, null); + UpdateHelper updateHelper = new UpdateHelper(settings, null); // We just upsert one document with ttl IndexRequest indexRequest = new IndexRequest("test", "type1", "1") diff --git a/core/src/test/java/org/elasticsearch/blocks/SimpleBlocksIT.java b/core/src/test/java/org/elasticsearch/blocks/SimpleBlocksIT.java index ffe82f9388d..699b919cf05 100644 --- a/core/src/test/java/org/elasticsearch/blocks/SimpleBlocksIT.java +++ b/core/src/test/java/org/elasticsearch/blocks/SimpleBlocksIT.java @@ -141,15 +141,6 @@ public class SimpleBlocksIT extends ESIntegTestCase { } } - private void canNotIndexExists(String index) { - try { - IndicesExistsResponse r = client().admin().indices().prepareExists(index).execute().actionGet(); - fail(); - } catch 
(ClusterBlockException e) { - // all is well - } - } - private void setIndexReadOnly(String index, Object value) { HashMap newSettings = new HashMap<>(); newSettings.put(IndexMetaData.SETTING_READ_ONLY, value); diff --git a/core/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java b/core/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java index 86965a679da..b48fcc78c6c 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.bootstrap; import org.apache.lucene.util.Constants; +import org.elasticsearch.common.SuppressLoggerChecks; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.test.ESTestCase; @@ -46,6 +47,7 @@ public class MaxMapCountCheckTests extends ESTestCase { } } + @SuppressLoggerChecks(reason = "mock usage") public void testGetMaxMapCount() throws IOException { final long procSysVmMaxMapCount = randomIntBetween(1, Integer.MAX_VALUE); final BufferedReader reader = mock(BufferedReader.class); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java index 40995ff778b..be0848ed4dc 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java @@ -41,7 +41,7 @@ import static org.hamcrest.Matchers.equalTo; public class BasicAnalysisBackwardCompatibilityIT extends ESBackcompatTestCase { // This pattern match characters with Line_Break = Complex_Content. 
- final static Pattern complexUnicodeChars = Pattern.compile("[\u17B4\u17B5\u17D3\u17CB-\u17D1\u17DD\u1036\u17C6\u1A74\u1038\u17C7\u0E4E\u0E47-\u0E4D\u0EC8-\u0ECD\uAABF\uAAC1\u1037\u17C8-\u17CA\u1A75-\u1A7C\u1AA8-\u1AAB\uAADE\uAADF\u1AA0-\u1AA6\u1AAC\u1AAD\u109E\u109F\uAA77-\uAA79\u0E46\u0EC6\u17D7\u1AA7\uA9E6\uAA70\uAADD\u19DA\u0E01-\u0E3A\u0E40-\u0E45\u0EDE\u0E81\u0E82\u0E84\u0E87\u0E88\u0EAA\u0E8A\u0EDF\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAB\u0EDC\u0EDD\u0EAD-\u0EB9\u0EBB-\u0EBD\u0EC0-\u0EC4\uAA80-\uAABE\uAAC0\uAAC2\uAADB\uAADC\u1000\u1075\u1001\u1076\u1002\u1077\uAA60\uA9E9\u1003\uA9E0\uA9EA\u1004\u105A\u1005\u1078\uAA61\u1006\uA9E1\uAA62\uAA7E\u1007\uAA63\uA9EB\u1079\uAA72\u1008\u105B\uA9E2\uAA64\uA9EC\u1061\uAA7F\u1009\u107A\uAA65\uA9E7\u100A\u100B\uAA66\u100C\uAA67\u100D\uAA68\uA9ED\u100E\uAA69\uA9EE\u100F\u106E\uA9E3\uA9EF\u1010-\u1012\u107B\uA9FB\u1013\uAA6A\uA9FC\u1014\u107C\uAA6B\u105E\u1015\u1016\u107D\u107E\uAA6F\u108E\uA9E8\u1017\u107F\uA9FD\u1018\uA9E4\uA9FE\u1019\u105F\u101A\u103B\u101B\uAA73\uAA7A\u103C\u101C\u1060\u101D\u103D\u1082\u1080\u1050\u1051\u1065\u101E\u103F\uAA6C\u101F\u1081\uAA6D\u103E\uAA6E\uAA71\u1020\uA9FA\u105C\u105D\u106F\u1070\u1066\u1021-\u1026\u1052-\u1055\u1027-\u102A\u102C\u102B\u1083\u1072\u109C\u102D\u1071\u102E\u1033\u102F\u1073\u1074\u1030\u1056-\u1059\u1031\u1084\u1035\u1085\u1032\u109D\u1034\u1062\u1067\u1068\uA9E5\u1086\u1039\u103A\u1063\u1064\u1069-\u106D\u1087\u108B\u1088\u108C\u108D\u1089\u108A\u108F\u109A\u109B\uAA7B-\uAA7D\uAA74-\uAA76\u1780-\u17A2\u17DC\u17A3-\u17B3\u17B6-\u17C5\u17D2\u1950-\u196D\u1970-\u1974\u1980-\u199C\u19DE\u19DF\u199D-\u19AB\u19B0-\u19C9\u1A20-\u1A26\u1A58\u1A59\u1A27-\u1A3B\u1A5A\u1A5B\u1A3C-\u1A46\u1A54\u1A47-\u1A4C\u1A53\u1A6B\u1A55-\u1A57\u1A5C-\u1A5E\u1A4D-\u1A52\u1A61\u1A6C\u1A62-\u1A6A\u1A6E\u1A6F\u1A73\u1A70-\u1A72\u1A6D\u1A60]"); + static final Pattern complexUnicodeChars = 
Pattern.compile("[\u17B4\u17B5\u17D3\u17CB-\u17D1\u17DD\u1036\u17C6\u1A74\u1038\u17C7\u0E4E\u0E47-\u0E4D\u0EC8-\u0ECD\uAABF\uAAC1\u1037\u17C8-\u17CA\u1A75-\u1A7C\u1AA8-\u1AAB\uAADE\uAADF\u1AA0-\u1AA6\u1AAC\u1AAD\u109E\u109F\uAA77-\uAA79\u0E46\u0EC6\u17D7\u1AA7\uA9E6\uAA70\uAADD\u19DA\u0E01-\u0E3A\u0E40-\u0E45\u0EDE\u0E81\u0E82\u0E84\u0E87\u0E88\u0EAA\u0E8A\u0EDF\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAB\u0EDC\u0EDD\u0EAD-\u0EB9\u0EBB-\u0EBD\u0EC0-\u0EC4\uAA80-\uAABE\uAAC0\uAAC2\uAADB\uAADC\u1000\u1075\u1001\u1076\u1002\u1077\uAA60\uA9E9\u1003\uA9E0\uA9EA\u1004\u105A\u1005\u1078\uAA61\u1006\uA9E1\uAA62\uAA7E\u1007\uAA63\uA9EB\u1079\uAA72\u1008\u105B\uA9E2\uAA64\uA9EC\u1061\uAA7F\u1009\u107A\uAA65\uA9E7\u100A\u100B\uAA66\u100C\uAA67\u100D\uAA68\uA9ED\u100E\uAA69\uA9EE\u100F\u106E\uA9E3\uA9EF\u1010-\u1012\u107B\uA9FB\u1013\uAA6A\uA9FC\u1014\u107C\uAA6B\u105E\u1015\u1016\u107D\u107E\uAA6F\u108E\uA9E8\u1017\u107F\uA9FD\u1018\uA9E4\uA9FE\u1019\u105F\u101A\u103B\u101B\uAA73\uAA7A\u103C\u101C\u1060\u101D\u103D\u1082\u1080\u1050\u1051\u1065\u101E\u103F\uAA6C\u101F\u1081\uAA6D\u103E\uAA6E\uAA71\u1020\uA9FA\u105C\u105D\u106F\u1070\u1066\u1021-\u1026\u1052-\u1055\u1027-\u102A\u102C\u102B\u1083\u1072\u109C\u102D\u1071\u102E\u1033\u102F\u1073\u1074\u1030\u1056-\u1059\u1031\u1084\u1035\u1085\u1032\u109D\u1034\u1062\u1067\u1068\uA9E5\u1086\u1039\u103A\u1063\u1064\u1069-\u106D\u1087\u108B\u1088\u108C\u108D\u1089\u108A\u108F\u109A\u109B\uAA7B-\uAA7D\uAA74-\uAA76\u1780-\u17A2\u17DC\u17A3-\u17B3\u17B6-\u17C5\u17D2\u1950-\u196D\u1970-\u1974\u1980-\u199C\u19DE\u19DF\u199D-\u19AB\u19B0-\u19C9\u1A20-\u1A26\u1A58\u1A59\u1A27-\u1A3B\u1A5A\u1A5B\u1A3C-\u1A46\u1A54\u1A47-\u1A4C\u1A53\u1A6B\u1A55-\u1A57\u1A5C-\u1A5E\u1A4D-\u1A52\u1A61\u1A6C\u1A62-\u1A6A\u1A6E\u1A6F\u1A73\u1A70-\u1A72\u1A6D\u1A60]"); /** * Simple upgrade test for analyzers to make sure they analyze to the same tokens after upgrade diff --git a/core/src/test/java/org/elasticsearch/bwcompat/NodesStatsBasicBackwardsCompatIT.java b/core/src/test/java/org/elasticsearch/bwcompat/NodesStatsBasicBackwardsCompatIT.java index c9d5f0b622e..4601c1bcfcf 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/NodesStatsBasicBackwardsCompatIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/NodesStatsBasicBackwardsCompatIT.java @@ -22,7 +22,6 @@ package org.elasticsearch.bwcompat; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestBuilder; -import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESBackcompatTestCase; @@ -46,7 +45,7 @@ public class NodesStatsBasicBackwardsCompatIT extends ESBackcompatTestCase { for (NodeInfo n : nodesInfo.getNodes()) { TransportClient tc = TransportClient.builder().settings(settings).build().addTransportAddress(n.getNode().getAddress()); // Just verify that the NS can be sent and serialized/deserialized between nodes with basic indices - NodesStatsResponse ns = tc.admin().cluster().prepareNodesStats().setIndices(true).execute().actionGet(); + tc.admin().cluster().prepareNodesStats().setIndices(true).execute().actionGet(); tc.close(); } } @@ -78,7 +77,7 @@ public class NodesStatsBasicBackwardsCompatIT extends ESBackcompatTestCase { method.invoke(nsBuilder); } } - NodesStatsResponse ns 
= nsBuilder.execute().actionGet(); + nsBuilder.execute().actionGet(); tc.close(); } diff --git a/core/src/test/java/org/elasticsearch/cli/CommandTests.java b/core/src/test/java/org/elasticsearch/cli/CommandTests.java index 1f50ad4c13b..376c883c074 100644 --- a/core/src/test/java/org/elasticsearch/cli/CommandTests.java +++ b/core/src/test/java/org/elasticsearch/cli/CommandTests.java @@ -30,7 +30,7 @@ public class CommandTests extends ESTestCase { } @Override protected void execute(Terminal terminal, OptionSet options) throws Exception { - throw new UserError(ExitCodes.DATA_ERROR, "Bad input"); + throw new UserException(ExitCodes.DATA_ERROR, "Bad input"); } } @@ -40,7 +40,7 @@ public class CommandTests extends ESTestCase { } @Override protected void execute(Terminal terminal, OptionSet options) throws Exception { - throw new UserError(ExitCodes.USAGE, "something was no good"); + throw new UserException(ExitCodes.USAGE, "something was no good"); } } @@ -87,7 +87,7 @@ public class CommandTests extends ESTestCase { MockTerminal terminal = new MockTerminal(); NoopCommand command = new NoopCommand(); String[] args = {"-v", "-s"}; - UserError e = expectThrows(UserError.class, () -> { + UserException e = expectThrows(UserException.class, () -> { command.mainWithoutErrorHandling(args, terminal); }); assertTrue(e.getMessage(), e.getMessage().contains("Cannot specify -s and -v together")); diff --git a/core/src/test/java/org/elasticsearch/cli/MultiCommandTests.java b/core/src/test/java/org/elasticsearch/cli/MultiCommandTests.java index 4f91d378440..f4680492028 100644 --- a/core/src/test/java/org/elasticsearch/cli/MultiCommandTests.java +++ b/core/src/test/java/org/elasticsearch/cli/MultiCommandTests.java @@ -61,7 +61,7 @@ public class MultiCommandTests extends CommandTestCase { public void testUnknownCommand() throws Exception { multiCommand.subcommands.put("something", new DummySubCommand()); - UserError e = expectThrows(UserError.class, () -> { + UserException e = expectThrows(UserException.class, () -> { execute("somethingelse"); }); assertEquals(ExitCodes.USAGE, e.exitCode); @@ -70,7 +70,7 @@ public class MultiCommandTests extends CommandTestCase { public void testMissingCommand() throws Exception { multiCommand.subcommands.put("command1", new DummySubCommand()); - UserError e = expectThrows(UserError.class, () -> { + UserException e = expectThrows(UserException.class, () -> { execute(); }); assertEquals(ExitCodes.USAGE, e.exitCode);
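UserError becomes UserException throughout the CLI tests above, with the exit code still carried on the exception. A sketch of the assertion pattern those tests rely on, as a fragment from inside a test method (expectThrows comes from LuceneTestCase via ESTestCase):

    // The renamed exception still transports its exit code, so a test can
    // assert on both the type and the code in one step.
    UserException e = expectThrows(UserException.class, () -> {
        throw new UserException(ExitCodes.USAGE, "something was no good");
    });
    assertEquals(ExitCodes.USAGE, e.exitCode);

diff --git a/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java b/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java index 196b053ff82..276a43581a6 100644 --- a/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java +++ b/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java @@ -179,7 +179,7 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase { } @Override - public void onFailure(Throwable t) { + public void onFailure(Exception t) { Throwable e = unwrap(t, InternalException.class); assertThat("expected action [" + action + "] to throw an internal exception", e, notNullValue()); assertThat(action, equalTo(((InternalException) e).action)); diff --git a/core/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java b/core/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java index f69c8f2da0b..04f7b73b1f2 100644 --- a/core/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java +++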
b/core/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java @@ -46,7 +46,9 @@ public class NodeClientHeadersTests extends AbstractClientHeadersTestCase { protected Client buildClient(Settings headersSettings, GenericAction[] testedActions) { Settings settings = HEADER_SETTINGS; Actions actions = new Actions(settings, threadPool, testedActions); - return new NodeClient(settings, threadPool, actions); + NodeClient client = new NodeClient(settings, threadPool); + client.intialize(actions); + return client; } private static class Actions extends HashMap { diff --git a/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java b/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java index c085c3164a0..9d2c176dffb 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java +++ b/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java @@ -187,19 +187,13 @@ abstract class FailAndRetryMockTransport imp } @Override - public Transport start() { - return null; - } + public void start() {} @Override - public Transport stop() { - return null; - } + public void stop() {} @Override - public void close() { - - } + public void close() {} @Override public Map profileBoundAddresses() { diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java index 5c07f5e6f25..3d50e2e44ac 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.client.transport; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.liveness.LivenessResponse; import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction; @@ -123,7 +122,7 @@ public class TransportClientNodesServiceTests extends ESTestCase { @SuppressWarnings("unchecked") public void handleResponse(T response) { LivenessResponse livenessResponse = new LivenessResponse(clusterName, - new DiscoveryNode(node.getName(), node.getId(), "liveness-hostname" + node.getId(), + new DiscoveryNode(node.getName(), node.getId(), node.getEphemeralId(), "liveness-hostname" + node.getId(), "liveness-hostaddress" + node.getId(), new LocalTransportAddress("liveness-address-" + node.getId()), node.getAttributes(), node.getRoles(), node.getVersion())); @@ -171,7 +170,7 @@ public class TransportClientNodesServiceTests extends ESTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { finalFailures.incrementAndGet(); finalFailure.set(e); latch.countDown(); diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterChangedEventTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterChangedEventTests.java index 72748a59986..555f23813cb 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterChangedEventTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterChangedEventTests.java @@ -30,7 +30,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; 
+import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.index.Index; import org.elasticsearch.test.ESTestCase; @@ -320,7 +320,8 @@ public class ClusterChangedEventTests extends ESTestCase { // Create a new DiscoveryNode private static DiscoveryNode newNode(final String nodeId, Set roles) { - return new DiscoveryNode(nodeId, nodeId, DummyTransportAddress.INSTANCE, Collections.emptyMap(), roles, Version.CURRENT); + return new DiscoveryNode(nodeId, nodeId, nodeId, "host", "host_address", new LocalTransportAddress("_test_" + nodeId), + Collections.emptyMap(), roles, Version.CURRENT); } // Create the metadata for a cluster state. diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java index 27b9192006d..f0e45ff37bb 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java @@ -22,7 +22,6 @@ package org.elasticsearch.cluster; import com.carrotsearch.hppc.cursors.ObjectCursor; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionModule; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsAction; @@ -31,7 +30,6 @@ import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.cluster.service.ClusterService; @@ -44,6 +42,7 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.store.Store; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster; @@ -56,11 +55,13 @@ import org.hamcrest.Matchers; import java.io.IOException; import java.util.Collection; +import java.util.List; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicBoolean; import static java.util.Collections.emptySet; +import static java.util.Collections.singletonList; import static java.util.Collections.unmodifiableSet; import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -74,10 +75,10 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) public class ClusterInfoServiceIT extends ESIntegTestCase { - public static class TestPlugin extends Plugin { - - public void onModule(ActionModule module) { - module.registerFilter(BlockingActionFilter.class); + public static class TestPlugin extends Plugin implements ActionPlugin { + @Override + public List> getActionFilters() { + return singletonList(BlockingActionFilter.class); } }
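The TestPlugin hunk above captures the plugin API shift made throughout this change: instead of registering a filter by mutating ActionModule from onModule(...), a plugin implements ActionPlugin and declares its filter classes via getActionFilters(). A sketch of the shape, assuming a BlockingActionFilter class in scope like the one this test defines elsewhere:

    import java.util.List;

    import org.elasticsearch.action.support.ActionFilter;
    import org.elasticsearch.plugins.ActionPlugin;
    import org.elasticsearch.plugins.Plugin;

    import static java.util.Collections.singletonList;

    public class FilteringTestPlugin extends Plugin implements ActionPlugin {
        @Override
        public List<Class<? extends ActionFilter>> getActionFilters() {
            // Declarative registration: the node instantiates these filter
            // classes itself instead of the plugin touching ActionModule.
            return singletonList(BlockingActionFilter.class);
        }
    }

diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java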
b/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java index b82b5e0ba60..68a0f73eb34 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.RepositoriesMetaData; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; @@ -52,6 +53,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.snapshots.Snapshot; +import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.test.ESIntegTestCase; import java.util.Collections; @@ -121,7 +123,7 @@ public class ClusterStateDiffIT extends ESIntegTestCase { Diff diffBeforeSerialization = clusterState.diff(previousClusterState); BytesStreamOutput os = new BytesStreamOutput(); diffBeforeSerialization.writeTo(os); - byte[] diffBytes = os.bytes().toBytes(); + byte[] diffBytes = BytesReference.toBytes(os.bytes()); Diff diff; try (StreamInput input = StreamInput.wrap(diffBytes)) { diff = previousClusterStateFromDiffs.readDiffFrom(input); @@ -190,9 +192,8 @@ public class ClusterStateDiffIT extends ESIntegTestCase { List nodeIds = randomSubsetOf(randomInt(clusterState.nodes().getNodes().size() - 1), clusterState.nodes().getNodes().keys().toArray(String.class)); for (String nodeId : nodeIds) { if (nodeId.startsWith("node-")) { + nodes.remove(nodeId); if (randomBoolean()) { - nodes.remove(nodeId); - } else { nodes.put(new DiscoveryNode(nodeId, new LocalTransportAddress(randomAsciiOfLength(10)), emptyMap(), emptySet(), randomVersion(random()))); } diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java index 5e272a27459..6b99e525cb2 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java @@ -22,7 +22,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.test.ESTestCase; import static java.util.Collections.emptyMap; @@ -32,8 +32,9 @@ import static org.hamcrest.Matchers.equalTo; public class ClusterStateTests extends ESTestCase { public void testSupersedes() { - final DiscoveryNode node1 = new DiscoveryNode("node1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); - final DiscoveryNode node2 = new DiscoveryNode("node2", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + final Version version = Version.CURRENT; + final DiscoveryNode node1 = new DiscoveryNode("node1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), version); + final DiscoveryNode node2 = new DiscoveryNode("node2", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), version); final DiscoveryNodes nodes = 
DiscoveryNodes.builder().put(node1).put(node2).build(); ClusterName name = ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY); ClusterState noMaster1 = ClusterState.builder(name).version(randomInt(5)).nodes(nodes).build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java b/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java index 82a6777c0da..54c1b74eba8 100644 --- a/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java @@ -23,8 +23,6 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.CommonStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -33,7 +31,7 @@ import org.elasticsearch.cluster.routing.ShardRoutingHelper; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardPath; @@ -200,11 +198,11 @@ public class DiskUsageTests extends ESTestCase { new FsInfo.Path("/most", "/dev/sda", 100, 90, 80), }; List nodeStats = Arrays.asList( - new NodeStats(new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), 0, + new NodeStats(new DiscoveryNode("node_1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null,new FsInfo(0, null, node1FSInfo), null,null,null,null,null, null), - new NodeStats(new DiscoveryNode("node_2", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), 0, + new NodeStats(new DiscoveryNode("node_2", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null, new FsInfo(0, null, node2FSInfo), null,null,null,null,null, null), - new NodeStats(new DiscoveryNode("node_3", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), 0, + new NodeStats(new DiscoveryNode("node_3", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null, new FsInfo(0, null, node3FSInfo), null,null,null,null,null, null) ); InternalClusterInfoService.fillDiskUsagePerNode(logger, nodeStats, newLeastAvaiableUsages, newMostAvaiableUsages); @@ -241,11 +239,11 @@ public class DiskUsageTests extends ESTestCase { new FsInfo.Path("/least", "/dev/sda", 10, -8, 0), }; List nodeStats = Arrays.asList( - new NodeStats(new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), 0, + new NodeStats(new DiscoveryNode("node_1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null,new FsInfo(0, null, node1FSInfo), null,null,null,null,null, null), - new NodeStats(new DiscoveryNode("node_2", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), 0, + new NodeStats(new DiscoveryNode("node_2", 
LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null, new FsInfo(0, null, node2FSInfo), null,null,null,null,null, null), - new NodeStats(new DiscoveryNode("node_3", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), 0, + new NodeStats(new DiscoveryNode("node_3", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null, new FsInfo(0, null, node3FSInfo), null,null,null,null,null, null) ); InternalClusterInfoService.fillDiskUsagePerNode(logger, nodeStats, newLeastAvailableUsages, newMostAvailableUsages); diff --git a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java index 61bb898acc2..aad2aa212a1 100644 --- a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java @@ -395,8 +395,8 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { - failure.set(t); + public void onFailure(String source, Exception e) { + failure.set(e); latch.countDown(); } }); diff --git a/core/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java index 5eb5a34c44f..b0bc3ee2e4e 100644 --- a/core/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java @@ -26,7 +26,7 @@ import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.test.ESTestCase; @@ -64,7 +64,7 @@ public class NodeConnectionsServiceTests extends ESTestCase { List nodes = new ArrayList<>(); for (int i = randomIntBetween(20, 50); i > 0; i--) { Set roles = new HashSet<>(randomSubsetOf(Arrays.asList(DiscoveryNode.Role.values()))); - nodes.add(new DiscoveryNode("node_" + i, "" + i, DummyTransportAddress.INSTANCE, Collections.emptyMap(), + nodes.add(new DiscoveryNode("node_" + i, "" + i, LocalTransportAddress.buildUnique(), Collections.emptyMap(), roles, Version.CURRENT)); } return nodes; @@ -253,18 +253,12 @@ public class NodeConnectionsServiceTests extends ESTestCase { } @Override - public Transport start() { - return null; - } + public void start() {} @Override - public Transport stop() { - return null; - } + public void stop() {} @Override - public void close() { - - } + public void close() {} } } diff --git a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java index 1faac874114..d12b6b563b3 100644 --- a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.cluster.ClusterState; 
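// The callback hunks in this region narrow onFailure(Throwable) to
// onFailure(Exception), so fatal Errors propagate instead of being swallowed
// by test listeners. A minimal sketch of a task written against the new
// signature, assuming only the ClusterStateUpdateTask methods visible in this
// diff (the NoopUpdateTask name is illustrative):
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateUpdateTask;

class NoopUpdateTask extends ClusterStateUpdateTask {
    @Override
    public ClusterState execute(ClusterState currentState) {
        return currentState; // no-op cluster state update
    }

    @Override
    public void onFailure(String source, Exception e) {
        // Only Exceptions arrive here now; an OutOfMemoryError or other
        // Throwable is no longer routed through this callback.
        throw new RuntimeException(e);
    }
}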
import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.node.DiscoveryNodeService; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; @@ -41,6 +40,7 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; @@ -305,7 +305,8 @@ public class ShardFailedClusterStateTaskExecutorTests extends ESAllocationTestCa return randomSubsetOf(1, shards.toArray(new ShardRouting[0])).get(0); } else { return - TestShardRouting.newShardRouting(shardRouting.shardId(), DiscoveryNodeService.generateNodeId(Settings.EMPTY), randomBoolean(), randomFrom(ShardRoutingState.values())); + TestShardRouting.newShardRouting(shardRouting.shardId(), UUIDs.randomBase64UUID(random()), randomBoolean(), + randomFrom(ShardRoutingState.values())); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java index 7f0ac1a6e45..d387d6f7d43 100644 --- a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java @@ -148,7 +148,7 @@ public class ShardStateActionTests extends ESTestCase { } @Override - public void onFailure(Throwable t) { + public void onFailure(Exception e) { success.set(false); latch.countDown(); assert false; @@ -196,7 +196,7 @@ public class ShardStateActionTests extends ESTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { success.set(false); latch.countDown(); assert false; @@ -245,9 +245,9 @@ public class ShardStateActionTests extends ESTestCase { } @Override - public void onFailure(Throwable t) { + public void onFailure(Exception e) { success.set(false); - throwable.set(t); + throwable.set(e); latch.countDown(); assert false; } @@ -281,7 +281,7 @@ public class ShardStateActionTests extends ESTestCase { } @Override - public void onFailure(Throwable t) { + public void onFailure(Exception e) { failure.set(true); } }); @@ -313,7 +313,7 @@ public class ShardStateActionTests extends ESTestCase { } @Override - public void onFailure(Throwable t) { + public void onFailure(Exception e) { success.set(false); latch.countDown(); assert false; @@ -348,8 +348,8 @@ public class ShardStateActionTests extends ESTestCase { } @Override - public void onFailure(Throwable t) { - failure.set(t); + public void onFailure(Exception e) { + failure.set(e); latch.countDown(); } }); @@ -401,7 +401,7 @@ public class ShardStateActionTests extends ESTestCase { } } - private Throwable getSimulatedFailure() { + private Exception getSimulatedFailure() { return new CorruptIndexException("simulated", (String) null); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java 
b/core/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java index 7f2d0828128..a7fe1b918c0 100644 --- a/core/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java @@ -55,7 +55,7 @@ public class ClusterBlockTests extends ESTestCase { out.setVersion(version); clusterBlock.writeTo(out); - StreamInput in = StreamInput.wrap(out.bytes()); + StreamInput in = out.bytes().streamInput(); in.setVersion(version); ClusterBlock result = ClusterBlock.readClusterBlock(in); diff --git a/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java b/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java index fd1e3e62466..8718c479216 100644 --- a/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java @@ -118,8 +118,8 @@ public class ClusterStateHealthTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { - logger.warn("unexpected failure", t); + public void onFailure(String source, Exception e) { + logger.warn("unexpected failure", e); } }); @@ -169,7 +169,7 @@ public class ClusterStateHealthTests extends ESTestCase { if (randomBoolean()) { BytesStreamOutput out = new BytesStreamOutput(); clusterStateHealth.writeTo(out); - StreamInput in = StreamInput.wrap(out.bytes()); + StreamInput in = out.bytes().streamInput(); clusterStateHealth = new ClusterStateHealth(in); } return clusterStateHealth; diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java index aec701052fb..8dd950ba8e6 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java @@ -60,8 +60,7 @@ public class IndexGraveyardTests extends ESTestCase { final IndexGraveyard graveyard = createRandom(); final BytesStreamOutput out = new BytesStreamOutput(); graveyard.writeTo(out); - final ByteBufferStreamInput in = new ByteBufferStreamInput(ByteBuffer.wrap(out.bytes().toBytes())); - assertThat(IndexGraveyard.fromStream(in), equalTo(graveyard)); + assertThat(IndexGraveyard.fromStream(out.bytes().streamInput()), equalTo(graveyard)); } public void testXContent() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java index 0c9827587ea..5fef33be388 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java @@ -69,7 +69,7 @@ public class IndexMetaDataTests extends ESTestCase { final BytesStreamOutput out = new BytesStreamOutput(); metaData.writeTo(out); - IndexMetaData deserialized = IndexMetaData.PROTO.readFrom(StreamInput.wrap(out.bytes())); + IndexMetaData deserialized = IndexMetaData.PROTO.readFrom(out.bytes().streamInput()); assertEquals(metaData, deserialized); assertEquals(metaData.hashCode(), deserialized.hashCode()); diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java index 04d27020273..ff31a72a5c6 100644 --- 
a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java @@ -34,7 +34,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.IndexAlreadyExistsException; import org.elasticsearch.indices.InvalidIndexNameException; @@ -181,7 +181,7 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase { } private DiscoveryNode newNode(String nodeId) { - return new DiscoveryNode(nodeId, DummyTransportAddress.INSTANCE, emptyMap(), + return new DiscoveryNode(nodeId, LocalTransportAddress.buildUnique(), emptyMap(), Collections.unmodifiableSet(new HashSet<>(Arrays.asList(DiscoveryNode.Role.MASTER, DiscoveryNode.Role.DATA))), Version.CURRENT); } diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java index e1e3a39122c..cf040fb3c7f 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.Version; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.ByteBufferStreamInput; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; @@ -34,7 +33,6 @@ import org.elasticsearch.index.Index; import org.elasticsearch.test.ESTestCase; import java.io.IOException; -import java.nio.ByteBuffer; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -185,8 +183,7 @@ public class MetaDataTests extends ESTestCase { final MetaData originalMeta = MetaData.builder().indexGraveyard(graveyard).build(); final BytesStreamOutput out = new BytesStreamOutput(); originalMeta.writeTo(out); - final ByteBufferStreamInput in = new ByteBufferStreamInput(ByteBuffer.wrap(out.bytes().toBytes())); - final MetaData fromStreamMeta = MetaData.PROTO.readFrom(in); + final MetaData fromStreamMeta = MetaData.PROTO.readFrom(out.bytes().streamInput()); assertThat(fromStreamMeta.indexGraveyard(), equalTo(fromStreamMeta.indexGraveyard())); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeFiltersTests.java b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeFiltersTests.java index 38aa73a9935..59f058a95fb 100644 --- a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeFiltersTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeFiltersTests.java @@ -21,8 +21,8 @@ package org.elasticsearch.cluster.node; import org.elasticsearch.Version; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import 
org.elasticsearch.test.ESTestCase; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -64,10 +64,11 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build(); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("name1", "id1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode node = new DiscoveryNode("name1", "id1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), + Version.CURRENT); assertThat(filters.match(node), equalTo(true)); - node = new DiscoveryNode("name2", "id2", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + node = new DiscoveryNode("name2", "id2", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); assertThat(filters.match(node), equalTo(false)); } @@ -77,10 +78,11 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build(); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("name1", "id1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode node = new DiscoveryNode("name1", "id1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), + Version.CURRENT); assertThat(filters.match(node), equalTo(true)); - node = new DiscoveryNode("name2", "id2", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + node = new DiscoveryNode("name2", "id2", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); assertThat(filters.match(node), equalTo(false)); } @@ -91,13 +93,14 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build()); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("name1", "id1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + final Version version = Version.CURRENT; + DiscoveryNode node = new DiscoveryNode("name1", "id1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), version); assertThat(filters.match(node), equalTo(true)); - node = new DiscoveryNode("name2", "id2", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + node = new DiscoveryNode("name2", "id2", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), version); assertThat(filters.match(node), equalTo(true)); - node = new DiscoveryNode("name3", "id3", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + node = new DiscoveryNode("name3", "id3", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), version); assertThat(filters.match(node), equalTo(false)); } @@ -111,7 +114,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { Map attributes = new HashMap<>(); attributes.put("tag", "A"); attributes.put("group", "B"); - DiscoveryNode node = new DiscoveryNode("name1", "id1", DummyTransportAddress.INSTANCE, + DiscoveryNode node = new DiscoveryNode("name1", "id1", LocalTransportAddress.buildUnique(), attributes, emptySet(), Version.CURRENT); assertThat(filters.match(node), equalTo(true)); @@ -119,7 +122,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { attributes.put("tag", "A"); attributes.put("group", "B"); attributes.put("name", "X"); - node = new DiscoveryNode("name2", "id2", DummyTransportAddress.INSTANCE, + node = new DiscoveryNode("name2", "id2", LocalTransportAddress.buildUnique(), attributes, 
emptySet(), Version.CURRENT); assertThat(filters.match(node), equalTo(true)); @@ -127,11 +130,11 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { attributes.put("tag", "A"); attributes.put("group", "F"); attributes.put("name", "X"); - node = new DiscoveryNode("name3", "id3", DummyTransportAddress.INSTANCE, + node = new DiscoveryNode("name3", "id3", LocalTransportAddress.buildUnique(), attributes, emptySet(), Version.CURRENT); assertThat(filters.match(node), equalTo(false)); - node = new DiscoveryNode("name4", "id4", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + node = new DiscoveryNode("name4", "id4", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); assertThat(filters.match(node), equalTo(false)); } @@ -141,7 +144,8 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build(); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("name1", "id1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode node = new DiscoveryNode("name1", "id1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), + Version.CURRENT); assertThat(filters.match(node), equalTo(true)); } @@ -152,7 +156,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build()); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(AND, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); + DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(true)); } @@ -163,7 +167,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build()); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(AND, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); + DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(false)); } @@ -174,7 +178,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build()); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(AND, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); + DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(false)); } @@ -185,7 +189,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build()); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); + DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(true)); } @@ -196,7 +200,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build()); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), 
null); + DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(true)); } @@ -207,7 +211,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build()); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(AND, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); + DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(true)); } @@ -218,7 +222,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build()); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(AND, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); + DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(false)); } @@ -229,7 +233,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build()); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); + DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(true)); } @@ -240,7 +244,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build()); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); + DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(true)); } diff --git a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeServiceTests.java index fb38a428a76..e9f80426df6 100644 --- a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeServiceTests.java @@ -19,9 +19,9 @@ package org.elasticsearch.cluster.node; -import org.elasticsearch.Version; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -55,7 +55,8 @@ public class DiscoveryNodeServiceTests extends ESTestCase { } } DiscoveryNodeService discoveryNodeService = new DiscoveryNodeService(builder.build()); - DiscoveryNode discoveryNode = discoveryNodeService.buildLocalNode(DummyTransportAddress.INSTANCE); + DiscoveryNode discoveryNode = discoveryNodeService.buildLocalNode(LocalTransportAddress.buildUnique(), + () -> UUIDs.randomBase64UUID(random())); assertThat(discoveryNode.getRoles(), equalTo(selectedRoles)); assertThat(discoveryNode.getAttributes(), equalTo(expectedAttributes)); } @@ -77,7 +78,8 @@ public class DiscoveryNodeServiceTests extends ESTestCase { 
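// buildLocalNode now takes the node id as a supplier instead of computing it
// via the DiscoveryNodeService.generateNodeId helper that earlier hunks
// delete. A minimal sketch of the two-argument call used in this diff,
// assuming the id source is a plain Supplier<String> (the LocalNodeBuilder
// name is illustrative):
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodeService;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.LocalTransportAddress;

class LocalNodeBuilder {
    static DiscoveryNode buildLocalNode(Settings settings) {
        DiscoveryNodeService service = new DiscoveryNodeService(settings);
        // Production code would persist and reuse the node id; a test can hand
        // in any id source, here a random UUID per call.
        return service.buildLocalNode(LocalTransportAddress.buildUnique(), () -> UUIDs.randomBase64UUID());
    }
}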
expectedAttributes.putAll(customAttributes); discoveryNodeService.addCustomAttributeProvider(() -> customAttributes); - DiscoveryNode discoveryNode = discoveryNodeService.buildLocalNode(DummyTransportAddress.INSTANCE); + DiscoveryNode discoveryNode = discoveryNodeService.buildLocalNode(LocalTransportAddress.buildUnique(), + () -> UUIDs.randomBase64UUID(random())); assertThat(discoveryNode.getAttributes(), equalTo(expectedAttributes)); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java index b0942ab401c..ec741a908c5 100644 --- a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java @@ -19,8 +19,9 @@ package org.elasticsearch.cluster.node; +import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.elasticsearch.Version; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; @@ -30,10 +31,15 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.nullValue; public class DiscoveryNodesTests extends ESTestCase { @@ -53,7 +59,7 @@ public class DiscoveryNodesTests extends ESTestCase { DiscoveryNode resolvedNode = discoveryNodes.resolveNode(nodeSelector.selector); assertThat(matchingNodeIds.size(), equalTo(1)); assertThat(resolvedNode.getId(), equalTo(matchingNodeIds.iterator().next())); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { if (matchingNodeIds.size() == 0) { assertThat(e.getMessage(), equalTo("failed to resolve [" + nodeSelector.selector + "], no matching nodes")); } else if (matchingNodeIds.size() > 1) { @@ -91,33 +97,105 @@ public class DiscoveryNodesTests extends ESTestCase { expectedNodeIdsSet.add(discoveryNode.getId()); } - String[] resolvedNodesIds = discoveryNodes.resolveNodesIds(nodeSelectors.toArray(new String[nodeSelectors.size()])); + String[] resolvedNodesIds = discoveryNodes.resolveNodes(nodeSelectors.toArray(new String[nodeSelectors.size()])); Arrays.sort(resolvedNodesIds); String[] expectedNodesIds = expectedNodeIdsSet.toArray(new String[expectedNodeIdsSet.size()]); Arrays.sort(expectedNodesIds); assertThat(resolvedNodesIds, equalTo(expectedNodesIds)); } - private static DiscoveryNodes buildDiscoveryNodes() { - int numNodes = randomIntBetween(1, 10); - DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); + public void testDeltas() { + Set nodesA = new HashSet<>(); + nodesA.addAll(randomNodes(1 + randomInt(10))); + Set nodesB = new HashSet<>(); + nodesB.addAll(randomNodes(1 + randomInt(5))); + for (DiscoveryNode node : randomSubsetOf(nodesA)) { + if (randomBoolean()) { + // change an attribute + Map attrs = new HashMap<>(node.getAttributes()); + attrs.put("new", "new"); + node = new DiscoveryNode(node.getName(), node.getId(), node.getAddress(), attrs, node.getRoles(), node.getVersion()); + } + nodesB.add(node); + } + + DiscoveryNode masterA = randomBoolean() ? 
null : RandomPicks.randomFrom(random(), nodesA); + DiscoveryNode masterB = randomBoolean() ? null : RandomPicks.randomFrom(random(), nodesB); + + DiscoveryNodes.Builder builderA = DiscoveryNodes.builder(); + nodesA.stream().forEach(builderA::put); + final String masterAId = masterA == null ? null : masterA.getId(); + builderA.masterNodeId(masterAId); + builderA.localNodeId(RandomPicks.randomFrom(random(), nodesA).getId()); + + DiscoveryNodes.Builder builderB = DiscoveryNodes.builder(); + nodesB.stream().forEach(builderB::put); + final String masterBId = masterB == null ? null : masterB.getId(); + builderB.masterNodeId(masterBId); + builderB.localNodeId(RandomPicks.randomFrom(random(), nodesB).getId()); + + final DiscoveryNodes discoNodesA = builderA.build(); + final DiscoveryNodes discoNodesB = builderB.build(); + logger.info("nodes A: {}", discoNodesA.prettyPrint()); + logger.info("nodes B: {}", discoNodesB.prettyPrint()); + + DiscoveryNodes.Delta delta = discoNodesB.delta(discoNodesA); + + if (masterB == null || Objects.equals(masterAId, masterBId)) { + assertFalse(delta.masterNodeChanged()); + assertThat(delta.previousMasterNode(), nullValue()); + assertThat(delta.newMasterNode(), nullValue()); + } else { + assertTrue(delta.masterNodeChanged()); + assertThat(delta.newMasterNode().getId(), equalTo(masterBId)); + assertThat(delta.previousMasterNode() != null ? delta.previousMasterNode().getId() : null, + equalTo(masterAId)); + } + + Set newNodes = new HashSet<>(nodesB); + newNodes.removeAll(nodesA); + assertThat(delta.added(), equalTo(newNodes.isEmpty() == false)); + assertThat(delta.addedNodes(), containsInAnyOrder(newNodes.stream().collect(Collectors.toList()).toArray())); + assertThat(delta.addedNodes().size(), equalTo(newNodes.size())); + + Set removedNodes = new HashSet<>(nodesA); + removedNodes.removeAll(nodesB); + assertThat(delta.removed(), equalTo(removedNodes.isEmpty() == false)); + assertThat(delta.removedNodes(), containsInAnyOrder(removedNodes.stream().collect(Collectors.toList()).toArray())); + assertThat(delta.removedNodes().size(), equalTo(removedNodes.size())); + } + + private static AtomicInteger idGenerator = new AtomicInteger(); + + private static List randomNodes(final int numNodes) { List nodesList = new ArrayList<>(); for (int i = 0; i < numNodes; i++) { Map attributes = new HashMap<>(); if (frequently()) { attributes.put("custom", randomBoolean() ? 
"match" : randomAsciiOfLengthBetween(3, 5)); } - final DiscoveryNode node = newNode(i, attributes, new HashSet<>(randomSubsetOf(Arrays.asList(DiscoveryNode.Role.values())))); - discoBuilder = discoBuilder.put(node); + final DiscoveryNode node = newNode(idGenerator.getAndIncrement(), attributes, + new HashSet<>(randomSubsetOf(Arrays.asList(DiscoveryNode.Role.values())))); nodesList.add(node); } + return nodesList; + } + + private static DiscoveryNodes buildDiscoveryNodes() { + int numNodes = randomIntBetween(1, 10); + DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); + List nodesList = randomNodes(numNodes); + for (DiscoveryNode node : nodesList) { + discoBuilder = discoBuilder.put(node); + } discoBuilder.localNodeId(randomFrom(nodesList).getId()); discoBuilder.masterNodeId(randomFrom(nodesList).getId()); return discoBuilder.build(); } private static DiscoveryNode newNode(int nodeId, Map attributes, Set roles) { - return new DiscoveryNode("name_" + nodeId, "node_" + nodeId, DummyTransportAddress.INSTANCE, attributes, roles, Version.CURRENT); + return new DiscoveryNode("name_" + nodeId, "node_" + nodeId, LocalTransportAddress.buildUnique(), attributes, roles, + Version.CURRENT); } private enum NodeSelector { @@ -152,7 +230,7 @@ public class DiscoveryNodesTests extends ESTestCase { nodes.getIngestNodes().keysIt().forEachRemaining(ids::add); return ids; } - },CUSTOM_ATTRIBUTE("attr:value") { + }, CUSTOM_ATTRIBUTE("attr:value") { @Override Set matchingNodeIds(DiscoveryNodes nodes) { Set ids = new HashSet<>(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryTermsTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryTermsTests.java index 4dceb42cfbc..32072282d6f 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryTermsTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryTermsTests.java @@ -50,7 +50,7 @@ public class PrimaryTermsTests extends ESAllocationTestCase { private RoutingTable testRoutingTable; private int numberOfShards; private int numberOfReplicas; - private final static Settings DEFAULT_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); + private static final Settings DEFAULT_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); private AllocationService allocationService; private ClusterState clusterState; @@ -65,7 +65,7 @@ public class PrimaryTermsTests extends ESAllocationTestCase { .build()); this.numberOfShards = randomIntBetween(1, 5); this.numberOfReplicas = randomIntBetween(1, 5); - logger.info("Setup test with " + this.numberOfShards + " shards and " + this.numberOfReplicas + " replicas."); + logger.info("Setup test with {} shards and {} replicas.", this.numberOfShards, this.numberOfReplicas); this.primaryTermsPerIndex.clear(); MetaData metaData = MetaData.builder() .put(createIndexMetaData(TEST_INDEX_1)) @@ -87,7 +87,7 @@ public class PrimaryTermsTests extends ESAllocationTestCase { * puts primary shard routings into initializing state */ private void initPrimaries() { - logger.info("adding " + (this.numberOfReplicas + 1) + " nodes and performing rerouting"); + logger.info("adding {} nodes and performing rerouting", this.numberOfReplicas + 1); Builder discoBuilder = DiscoveryNodes.builder(); for (int i = 0; i < this.numberOfReplicas + 1; i++) { discoBuilder = discoBuilder.put(newNode("node" + i)); diff --git 
a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java index 2d1a467a001..9da5e76ed1f 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java @@ -47,7 +47,7 @@ public class RoutingTableTests extends ESAllocationTestCase { private int numberOfReplicas; private int shardsPerIndex; private int totalNumberOfShards; - private final static Settings DEFAULT_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); + private static final Settings DEFAULT_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); private final AllocationService ALLOCATION_SERVICE = createAllocationService(Settings.builder() .put("cluster.routing.allocation.node_concurrent_recoveries", Integer.MAX_VALUE) // don't limit recoveries .put("cluster.routing.allocation.node_initial_primaries_recoveries", Integer.MAX_VALUE) diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java index 7267252b19f..fa9133f6d36 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java @@ -206,7 +206,7 @@ public class ShardRoutingTests extends ESTestCase { if (randomBoolean()) { BytesStreamOutput out = new BytesStreamOutput(); routing.writeTo(out); - routing = new ShardRouting(StreamInput.wrap(out.bytes())); + routing = new ShardRouting(out.bytes().streamInput()); } if (routing.initializing() || routing.relocating()) { assertEquals(routing.toString(), byteSize, routing.getExpectedShardSize()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java index 75300a4beb8..ec33a3cd5fc 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java @@ -82,7 +82,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase { meta.writeTo(out); out.close(); - UnassignedInfo read = new UnassignedInfo(StreamInput.wrap(out.bytes())); + UnassignedInfo read = new UnassignedInfo(out.bytes().streamInput()); assertThat(read.getReason(), equalTo(meta.getReason())); assertThat(read.getUnassignedTimeInMillis(), equalTo(meta.getUnassignedTimeInMillis())); assertThat(read.getMessage(), equalTo(meta.getMessage())); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java index 28f27b8988c..f95fb687c76 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java @@ -430,7 +430,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { ); BytesStreamOutput bytes = new BytesStreamOutput(); AllocationCommands.writeTo(commands, bytes); - StreamInput in = StreamInput.wrap(bytes.bytes()); + StreamInput in = bytes.bytes().streamInput(); // Since the commands are named writeable we need to register them and wrap the input stream 
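// The serialization hunks here all make the same swap: instead of copying the
// output into a byte[] for StreamInput.wrap(...), the test reads back through
// out.bytes().streamInput(), which streams over the BytesReference directly.
// A minimal round-trip sketch built only from calls that appear in this diff
// (the RoundTrip name is illustrative):
import java.io.IOException;

import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;

class RoundTrip {
    static UnassignedInfo roundTrip(UnassignedInfo original) throws IOException {
        BytesStreamOutput out = new BytesStreamOutput();
        original.writeTo(out);
        // New style: no intermediate byte[] copy via toBytes()/wrap().
        StreamInput in = out.bytes().streamInput();
        return new UnassignedInfo(in);
    }
}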
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java index 48de64399e0..e4028da1138 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java @@ -25,8 +25,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.EmptyClusterInfoService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.common.UUIDs; -import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.IndexRoutingTable; @@ -44,12 +42,14 @@ import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.NodeVersionAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ReplicaAfterPrimaryActiveAllocationDecider; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.snapshots.Snapshot; +import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.test.ESAllocationTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.gateway.NoopGatewayAllocator; @@ -307,11 +307,11 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { public void testRebalanceDoesNotAllocatePrimaryAndReplicasOnDifferentVersionNodes() { ShardId shard1 = new ShardId("test1", "_na_", 0); ShardId shard2 = new ShardId("test2", "_na_", 0); - final DiscoveryNode newNode = new DiscoveryNode("newNode", DummyTransportAddress.INSTANCE, emptyMap(), + final DiscoveryNode newNode = new DiscoveryNode("newNode", LocalTransportAddress.buildUnique(), emptyMap(), MASTER_DATA_ROLES, Version.CURRENT); - final DiscoveryNode oldNode1 = new DiscoveryNode("oldNode1", DummyTransportAddress.INSTANCE, emptyMap(), + final DiscoveryNode oldNode1 = new DiscoveryNode("oldNode1", LocalTransportAddress.buildUnique(), emptyMap(), MASTER_DATA_ROLES, VersionUtils.getPreviousVersion()); - final DiscoveryNode oldNode2 = new DiscoveryNode("oldNode2", DummyTransportAddress.INSTANCE, emptyMap(), + final DiscoveryNode oldNode2 = new DiscoveryNode("oldNode2", LocalTransportAddress.buildUnique(), emptyMap(), MASTER_DATA_ROLES, VersionUtils.getPreviousVersion()); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder(shard1.getIndexName()).settings(settings(Version.CURRENT).put(Settings.EMPTY)).numberOfShards(1).numberOfReplicas(1)) @@ -347,11 +347,11 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { } public void testRestoreDoesNotAllocateSnapshotOnOlderNodes() { - final DiscoveryNode newNode = new DiscoveryNode("newNode", 
DummyTransportAddress.INSTANCE, emptyMap(), + final DiscoveryNode newNode = new DiscoveryNode("newNode", LocalTransportAddress.buildUnique(), emptyMap(), MASTER_DATA_ROLES, Version.CURRENT); - final DiscoveryNode oldNode1 = new DiscoveryNode("oldNode1", DummyTransportAddress.INSTANCE, emptyMap(), + final DiscoveryNode oldNode1 = new DiscoveryNode("oldNode1", LocalTransportAddress.buildUnique(), emptyMap(), MASTER_DATA_ROLES, VersionUtils.getPreviousVersion()); - final DiscoveryNode oldNode2 = new DiscoveryNode("oldNode2", DummyTransportAddress.INSTANCE, emptyMap(), + final DiscoveryNode oldNode2 = new DiscoveryNode("oldNode2", LocalTransportAddress.buildUnique(), emptyMap(), MASTER_DATA_ROLES, VersionUtils.getPreviousVersion()); int numberOfShards = randomIntBetween(1, 3); @@ -407,7 +407,7 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { return clusterState; } - private final void assertRecoveryNodeVersions(RoutingNodes routingNodes) { + private void assertRecoveryNodeVersions(RoutingNodes routingNodes) { logger.trace("RoutingNodes: {}", routingNodes.prettyPrint()); List mutableShardRoutings = routingNodes.shardsWithState(ShardRoutingState.RELOCATING); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SameShardRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SameShardRoutingTests.java index 3d0475ed137..e09d9790651 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SameShardRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SameShardRoutingTests.java @@ -33,7 +33,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationD import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.test.ESAllocationTestCase; import static java.util.Collections.emptyMap; @@ -63,9 +63,9 @@ public class SameShardRoutingTests extends ESAllocationTestCase { logger.info("--> adding two nodes with the same host"); clusterState = ClusterState.builder(clusterState).nodes( DiscoveryNodes.builder() - .put(new DiscoveryNode("node1", "node1", "test1", "test1", DummyTransportAddress.INSTANCE, emptyMap(), + .put(new DiscoveryNode("node1", "node1", "node1", "test1", "test1", LocalTransportAddress.buildUnique(), emptyMap(), MASTER_DATA_ROLES, Version.CURRENT)) - .put(new DiscoveryNode("node2", "node2", "test1", "test1", DummyTransportAddress.INSTANCE, emptyMap(), + .put(new DiscoveryNode("node2", "node2", "node2", "test1", "test1", LocalTransportAddress.buildUnique(), emptyMap(), MASTER_DATA_ROLES, Version.CURRENT))).build(); routingTable = strategy.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); @@ -82,7 +82,7 @@ public class SameShardRoutingTests extends ESAllocationTestCase { logger.info("--> add another node, with a different host, replicas will be allocating"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()) - .put(new DiscoveryNode("node3", "node3", "test2", "test2", DummyTransportAddress.INSTANCE, emptyMap(), + .put(new DiscoveryNode("node3", "node3", "node3", "test2", "test2", LocalTransportAddress.buildUnique(), emptyMap(), MASTER_DATA_ROLES, 
Version.CURRENT))).build(); routingTable = strategy.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java index be50c5f5331..56ca6381af9 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java @@ -41,7 +41,6 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.Index; @@ -110,9 +109,9 @@ public class DiskThresholdDeciderUnitTests extends ESAllocationTestCase { final Index index = metaData.index("test").getIndex(); ShardRouting test_0 = ShardRouting.newUnassigned(new ShardId(index, 0), null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); - DiscoveryNode node_0 = new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, Collections.emptyMap(), + DiscoveryNode node_0 = new DiscoveryNode("node_0", LocalTransportAddress.buildUnique(), Collections.emptyMap(), new HashSet<>(Arrays.asList(DiscoveryNode.Role.values())), Version.CURRENT); - DiscoveryNode node_1 = new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, Collections.emptyMap(), + DiscoveryNode node_1 = new DiscoveryNode("node_1", LocalTransportAddress.buildUnique(), Collections.emptyMap(), new HashSet<>(Arrays.asList(DiscoveryNode.Role.values())), Version.CURRENT); RoutingTable routingTable = RoutingTable.builder() @@ -149,9 +148,9 @@ public class DiskThresholdDeciderUnitTests extends ESAllocationTestCase { DiskThresholdDecider decider = new DiskThresholdDecider(Settings.EMPTY, nss, cis, null); ImmutableOpenMap.Builder shardRoutingMap = ImmutableOpenMap.builder(); - DiscoveryNode node_0 = new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, Collections.emptyMap(), + DiscoveryNode node_0 = new DiscoveryNode("node_0", LocalTransportAddress.buildUnique(), Collections.emptyMap(), new HashSet<>(Arrays.asList(DiscoveryNode.Role.values())), Version.CURRENT); - DiscoveryNode node_1 = new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, Collections.emptyMap(), + DiscoveryNode node_1 = new DiscoveryNode("node_1", LocalTransportAddress.buildUnique(), Collections.emptyMap(), new HashSet<>(Arrays.asList(DiscoveryNode.Role.values())), Version.CURRENT); MetaData metaData = MetaData.builder() diff --git a/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java b/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java index 7b81d3ece27..4fa6615ac45 100644 --- a/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java @@ -80,7 +80,7 @@ public class ClusterSerializationTests extends ESAllocationTestCase { BytesStreamOutput outStream = new 
BytesStreamOutput(); source.writeTo(outStream); - StreamInput inStream = StreamInput.wrap(outStream.bytes().toBytes()); + StreamInput inStream = outStream.bytes().streamInput(); RoutingTable target = RoutingTable.Builder.readFrom(inStream); assertThat(target.prettyPrint(), equalTo(source.prettyPrint())); diff --git a/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterStateToStringTests.java b/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterStateToStringTests.java index 99cde60f086..9957a6d3603 100644 --- a/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterStateToStringTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterStateToStringTests.java @@ -29,7 +29,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.test.ESAllocationTestCase; import static java.util.Collections.emptyMap; @@ -50,7 +50,7 @@ public class ClusterStateToStringTests extends ESAllocationTestCase { .addAsNew(metaData.index("test_idx")) .build(); - DiscoveryNodes nodes = DiscoveryNodes.builder().put(new DiscoveryNode("node_foo", DummyTransportAddress.INSTANCE, + DiscoveryNodes nodes = DiscoveryNodes.builder().put(new DiscoveryNode("node_foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT)).localNodeId("node_foo").masterNodeId("node_foo").build(); ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).nodes(nodes) diff --git a/core/src/test/java/org/elasticsearch/cluster/serialization/DiffableTests.java b/core/src/test/java/org/elasticsearch/cluster/serialization/DiffableTests.java index 452c6054576..611c261e334 100644 --- a/core/src/test/java/org/elasticsearch/cluster/serialization/DiffableTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/serialization/DiffableTests.java @@ -310,7 +310,7 @@ public class DiffableTests extends ESTestCase { logger.debug("--> serializing diff"); BytesStreamOutput out = new BytesStreamOutput(); diffMap.writeTo(out); - StreamInput in = StreamInput.wrap(out.bytes()); + StreamInput in = out.bytes().streamInput(); logger.debug("--> reading diff back"); diffMap = readDiff(in); } diff --git a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java index 991f11a4493..23713832edf 100644 --- a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java @@ -94,7 +94,7 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onAllNodesAcked(@Nullable Throwable t) { + public void onAllNodesAcked(@Nullable Exception e) { allNodesAcked.set(true); latch.countDown(); } @@ -127,8 +127,8 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { - logger.error("failed to execute callback in test {}", t, source); + public void onFailure(String source, Exception e) { + logger.error("failed to execute callback in test {}", e, source); onFailure.set(true); latch.countDown(); } @@ -165,7 +165,7 @@ public class ClusterServiceIT 
extends ESIntegTestCase { } @Override - public void onAllNodesAcked(@Nullable Throwable t) { + public void onAllNodesAcked(@Nullable Exception e) { allNodesAcked.set(true); latch.countDown(); } @@ -198,8 +198,8 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { - logger.error("failed to execute callback in test {}", t, source); + public void onFailure(String source, Exception e) { + logger.error("failed to execute callback in test {}", e, source); onFailure.set(true); latch.countDown(); } @@ -240,7 +240,7 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onAllNodesAcked(@Nullable Throwable t) { + public void onAllNodesAcked(@Nullable Exception e) { allNodesAcked.set(true); latch.countDown(); } @@ -272,8 +272,8 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { - logger.error("failed to execute callback in test {}", t, source); + public void onFailure(String source, Exception e) { + logger.error("failed to execute callback in test {}", e, source); onFailure.set(true); latch.countDown(); } @@ -313,7 +313,7 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onAllNodesAcked(@Nullable Throwable t) { + public void onAllNodesAcked(@Nullable Exception e) { allNodesAcked.set(true); latch.countDown(); } @@ -346,8 +346,8 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { - logger.error("failed to execute callback in test {}", t, source); + public void onFailure(String source, Exception e) { + logger.error("failed to execute callback in test {}", e, source); onFailure.set(true); latch.countDown(); } @@ -388,7 +388,7 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { invoked1.countDown(); fail(); } @@ -403,7 +403,7 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { fail(); } @@ -458,7 +458,7 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { invoked3.countDown(); fail(); } @@ -473,7 +473,7 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { fail(); } }); @@ -591,7 +591,7 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Singleton - public static class MasterAwareService extends AbstractLifecycleComponent<MasterAwareService> implements LocalNodeMasterListener + public static class MasterAwareService extends AbstractLifecycleComponent implements LocalNodeMasterListener { private final ClusterService clusterService; private volatile boolean master; diff --git a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java index 66f96f8cd3a..54f6233631b 100644 --- a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java @@ -37,7 +37,7 @@ import org.elasticsearch.common.collect.Tuple; import
org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; @@ -109,7 +109,7 @@ public class ClusterServiceTests extends ESTestCase { TimedClusterService timedClusterService = new TimedClusterService(Settings.builder().put("cluster.name", "ClusterServiceTests").build(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), threadPool); - timedClusterService.setLocalNode(new DiscoveryNode("node1", DummyTransportAddress.INSTANCE, emptyMap(), + timedClusterService.setLocalNode(new DiscoveryNode("node1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT)); timedClusterService.setNodeConnectionsService(new NodeConnectionsService(Settings.EMPTY, null, null) { @Override @@ -149,8 +149,8 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { - throw new RuntimeException(t); + public void onFailure(String source, Exception e) { + throw new RuntimeException(e); } }); @@ -163,7 +163,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { timedOut.countDown(); } @@ -183,8 +183,8 @@ public class ClusterServiceTests extends ESTestCase { final CountDownLatch allProcessed = new CountDownLatch(1); clusterService.submitStateUpdateTask("test3", new ClusterStateUpdateTask() { @Override - public void onFailure(String source, Throwable t) { - throw new RuntimeException(t); + public void onFailure(String source, Exception e) { + throw new RuntimeException(e); } @Override @@ -212,7 +212,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { taskFailed[0] = true; latch1.countDown(); } @@ -237,7 +237,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { taskFailed[0] = true; latch2.countDown(); } @@ -286,7 +286,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { } } ); @@ -326,9 +326,9 @@ public class ClusterServiceTests extends ESTestCase { ClusterStateTaskListener listener = new ClusterStateTaskListener() { @Override - public void onFailure(String source, Throwable t) { - logger.error("unexpected failure: [{}]", t, source); - failures.add(new Tuple<>(source, t)); + public void onFailure(String source, Exception e) { + logger.error("unexpected failure: [{}]", e, source); + failures.add(new Tuple<>(source, e)); updateLatch.countDown(); } @@ -387,8 +387,8 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { - fail(ExceptionsHelper.detailedMessage(t)); + public void onFailure(String source, Exception e) { + fail(ExceptionsHelper.detailedMessage(e)); } })) ; } @@ -523,8 +523,8 @@ public class ClusterServiceTests extends ESTestCase { final CountDownLatch updateLatch = new 
CountDownLatch(totalTaskCount); final ClusterStateTaskListener listener = new ClusterStateTaskListener() { @Override - public void onFailure(String source, Throwable t) { - fail(ExceptionsHelper.detailedMessage(t)); + public void onFailure(String source, Exception e) { + fail(ExceptionsHelper.detailedMessage(e)); } @Override @@ -647,8 +647,8 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { - fail(ExceptionsHelper.detailedMessage(t)); + public void onFailure(String source, Exception e) { + fail(ExceptionsHelper.detailedMessage(e)); } }; @@ -693,7 +693,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { fail(); } }); @@ -710,7 +710,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { latch.countDown(); } }); @@ -727,7 +727,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { fail(); } }); @@ -745,7 +745,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { fail(); } }); @@ -788,7 +788,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { fail(); } }); @@ -807,7 +807,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { latch.countDown(); } }); @@ -824,7 +824,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { fail(); } }); @@ -841,7 +841,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { fail(); } }); @@ -859,7 +859,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { fail(); } }); @@ -902,7 +902,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { } public void close() { @@ -930,7 +930,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { latch.countDown(); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java index 1c25659d2cf..7f423d1bb9e 100644 --- a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java @@ -263,7 +263,7 @@ public class ClusterSettingsIT extends ESIntegTestCase { .get(); fail("bogus value"); } catch (IllegalArgumentException ex) { - assertEquals(ex.getMessage(), "Failed to parse setting [discovery.zen.publish_timeout] 
with value [whatever] as a time value: unit is missing or unrecognized"); + assertEquals(ex.getMessage(), "failed to parse setting [discovery.zen.publish_timeout] with value [whatever] as a time value: unit is missing or unrecognized"); } assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1L)); diff --git a/core/src/test/java/org/elasticsearch/common/BooleansTests.java b/core/src/test/java/org/elasticsearch/common/BooleansTests.java index 6e5446cebf9..176c4c75dc7 100644 --- a/core/src/test/java/org/elasticsearch/common/BooleansTests.java +++ b/core/src/test/java/org/elasticsearch/common/BooleansTests.java @@ -51,9 +51,9 @@ public class BooleansTests extends ESTestCase { assertThat(Booleans.parseBoolean(null, false), is(false)); assertThat(Booleans.parseBoolean(null, true), is(true)); - assertThat(Booleans.parseBoolean(randomFrom("true", "on", "yes", "1"), randomFrom(null, Boolean.TRUE, Boolean.FALSE)), is(true)); - assertThat(Booleans.parseBoolean(randomFrom("false", "off", "no", "0"), randomFrom(null, Boolean.TRUE, Boolean.FALSE)), is(false)); - assertThat(Booleans.parseBoolean(randomFrom("true", "on", "yes").toUpperCase(Locale.ROOT),randomFrom(null, Boolean.TRUE, Boolean.FALSE)), is(true)); + assertThat(Booleans.parseBoolean(randomFrom("true", "on", "yes", "1"), randomFrom(Boolean.TRUE, Boolean.FALSE, null)), is(true)); + assertThat(Booleans.parseBoolean(randomFrom("false", "off", "no", "0"), randomFrom(Boolean.TRUE, Boolean.FALSE, null)), is(false)); + assertThat(Booleans.parseBoolean(randomFrom("true", "on", "yes").toUpperCase(Locale.ROOT),randomFrom(Boolean.TRUE, Boolean.FALSE, null)), is(true)); assertThat(Booleans.parseBoolean(null, Boolean.FALSE), is(false)); assertThat(Booleans.parseBoolean(null, Boolean.TRUE), is(true)); assertThat(Booleans.parseBoolean(null, null), nullValue()); @@ -70,7 +70,7 @@ public class BooleansTests extends ESTestCase { assertThat(Booleans.parseBooleanExact(randomFrom("true", "on", "yes", "1")), is(true)); assertThat(Booleans.parseBooleanExact(randomFrom("false", "off", "no", "0")), is(false)); try { - Booleans.parseBooleanExact(randomFrom(null, "fred", "foo", "barney")); + Booleans.parseBooleanExact(randomFrom("fred", "foo", "barney", null)); fail("Expected exception while parsing invalid boolean value "); } catch (Exception ex) { assertTrue(ex instanceof IllegalArgumentException); diff --git a/core/src/test/java/org/elasticsearch/common/ChannelsTests.java b/core/src/test/java/org/elasticsearch/common/ChannelsTests.java index 5bb9c614b84..c0cb3482b0e 100644 --- a/core/src/test/java/org/elasticsearch/common/ChannelsTests.java +++ b/core/src/test/java/org/elasticsearch/common/ChannelsTests.java @@ -19,14 +19,11 @@ package org.elasticsearch.common; -import org.elasticsearch.common.bytes.ByteBufferBytesReference; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Channels; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; -import org.jboss.netty.buffer.ByteBufferBackedChannelBuffer; -import org.jboss.netty.buffer.ChannelBuffer; import org.junit.After; import org.junit.Before; @@ -85,7 +82,7 @@ public class ChannelsTests extends ESTestCase { BytesReference source = new BytesArray(randomBytes, offset + offsetToRead, lengthToRead); BytesReference read = new BytesArray(readBytes, offset + offsetToRead, lengthToRead); - assertThat("read bytes didn't match written bytes", source.toBytes(), Matchers.equalTo(read.toBytes())); + 
assertThat("read bytes didn't match written bytes", BytesReference.toBytes(source), Matchers.equalTo(BytesReference.toBytes(read))); } public void testBufferReadPastEOFWithException() throws Exception { @@ -157,21 +154,9 @@ public class ChannelsTests extends ESTestCase { copy.flip(); BytesReference sourceRef = new BytesArray(randomBytes, offset + offsetToRead, lengthToRead); - BytesReference copyRef = new ByteBufferBytesReference(copy); - - assertTrue("read bytes didn't match written bytes", sourceRef.equals(copyRef)); - } - - - public void testWriteFromChannel() throws IOException { - int length = randomIntBetween(1, randomBytes.length / 2); - int offset = randomIntBetween(0, randomBytes.length - length); - ByteBuffer byteBuffer = ByteBuffer.wrap(randomBytes); - ChannelBuffer source = new ByteBufferBackedChannelBuffer(byteBuffer); - Channels.writeToChannel(source, offset, length, fileChannel); - - BytesReference copyRef = new BytesArray(Channels.readFromFileChannel(fileChannel, 0, length)); - BytesReference sourceRef = new BytesArray(randomBytes, offset, length); + byte[] tmp = new byte[copy.remaining()]; + copy.duplicate().get(tmp); + BytesReference copyRef = new BytesArray(tmp); assertTrue("read bytes didn't match written bytes", sourceRef.equals(copyRef)); } diff --git a/core/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java b/core/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java index bb9d23db1cb..f10a0da3029 100644 --- a/core/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java +++ b/core/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java @@ -43,7 +43,7 @@ public class MemoryCircuitBreakerTests extends ESTestCase { final int BYTES_PER_THREAD = scaledRandomIntBetween(500, 4500); final Thread[] threads = new Thread[NUM_THREADS]; final AtomicBoolean tripped = new AtomicBoolean(false); - final AtomicReference lastException = new AtomicReference<>(null); + final AtomicReference lastException = new AtomicReference<>(null); final MemoryCircuitBreaker breaker = new MemoryCircuitBreaker(new ByteSizeValue((BYTES_PER_THREAD * NUM_THREADS) - 1), 1.0, logger); @@ -60,8 +60,8 @@ public class MemoryCircuitBreakerTests extends ESTestCase { } else { assertThat(tripped.compareAndSet(false, true), equalTo(true)); } - } catch (Throwable e2) { - lastException.set(e2); + } catch (Exception e) { + lastException.set(e); } } } @@ -117,8 +117,8 @@ public class MemoryCircuitBreakerTests extends ESTestCase { } else { assertThat(tripped.compareAndSet(false, true), equalTo(true)); } - } catch (Throwable e2) { - lastException.set(e2); + } catch (Exception e) { + lastException.set(e); } } } @@ -178,8 +178,8 @@ public class MemoryCircuitBreakerTests extends ESTestCase { breaker.addEstimateBytesAndMaybeBreak(1L, "test"); } catch (CircuitBreakingException e) { tripped.incrementAndGet(); - } catch (Throwable e2) { - lastException.set(e2); + } catch (Exception e) { + lastException.set(e); } } } diff --git a/core/src/test/java/org/elasticsearch/common/bytes/AbstractBytesReferenceTestCase.java b/core/src/test/java/org/elasticsearch/common/bytes/AbstractBytesReferenceTestCase.java new file mode 100644 index 00000000000..c521314f92d --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/bytes/AbstractBytesReferenceTestCase.java @@ -0,0 +1,638 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.bytes; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.BytesRefIterator; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ByteArray; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.test.ESTestCase; + +import java.io.EOFException; +import java.io.IOException; +import java.util.Arrays; + +public abstract class AbstractBytesReferenceTestCase extends ESTestCase { + + protected static final int PAGE_SIZE = BigArrays.BYTE_PAGE_SIZE; + protected final BigArrays bigarrays = new BigArrays(null, new NoneCircuitBreakerService(), false); + + public void testGet() throws IOException { + int length = randomIntBetween(1, PAGE_SIZE * 3); + BytesReference pbr = newBytesReference(length); + + int sliceOffset = randomIntBetween(0, length / 2); + int sliceLength = Math.max(1, length - sliceOffset - 1); + BytesReference slice = pbr.slice(sliceOffset, sliceLength); + assertEquals(pbr.get(sliceOffset), slice.get(0)); + assertEquals(pbr.get(sliceOffset + sliceLength - 1), slice.get(sliceLength - 1)); + final int probes = randomIntBetween(20, 100); + BytesReference copy = new BytesArray(pbr.toBytesRef(), true); + for (int i = 0; i < probes; i++) { + int index = randomIntBetween(0, copy.length() - 1); + assertEquals(pbr.get(index), copy.get(index)); + index = randomIntBetween(sliceOffset, sliceOffset + sliceLength - 1); + assertEquals(pbr.get(index), slice.get(index - sliceOffset)); + } + } + + public void testLength() throws IOException { + int[] sizes = {0, randomInt(PAGE_SIZE), PAGE_SIZE, randomInt(PAGE_SIZE * 3)}; + + for (int i = 0; i < sizes.length; i++) { + BytesReference pbr = newBytesReference(sizes[i]); + assertEquals(sizes[i], pbr.length()); + } + } + + public void testSlice() throws IOException { + int length = randomInt(PAGE_SIZE * 3); + BytesReference pbr = newBytesReference(length); + int sliceOffset = randomIntBetween(0, length / 2); + int sliceLength = Math.max(0, length - sliceOffset - 1); + BytesReference slice = pbr.slice(sliceOffset, sliceLength); + assertEquals(sliceLength, slice.length()); + BytesRef singlePageOrNull = getSinglePageOrNull(slice); + if (singlePageOrNull != null) { + assertEquals(sliceOffset, singlePageOrNull.offset); + } + } + + public void testStreamInput() throws IOException { + int length = randomIntBetween(10, scaledRandomIntBetween(PAGE_SIZE * 2, PAGE_SIZE * 20)); + BytesReference pbr = newBytesReference(length); + StreamInput si = pbr.streamInput(); + assertNotNull(si); + + // read single 
bytes one by one + assertEquals(pbr.get(0), si.readByte()); + assertEquals(pbr.get(1), si.readByte()); + assertEquals(pbr.get(2), si.readByte()); + + // reset the stream for bulk reading + si.reset(); + + // buffer for bulk reads + byte[] origBuf = new byte[length]; + random().nextBytes(origBuf); + byte[] targetBuf = Arrays.copyOf(origBuf, origBuf.length); + + // bulk-read 0 bytes: must not modify buffer + si.readBytes(targetBuf, 0, 0); + assertEquals(origBuf[0], targetBuf[0]); + si.reset(); + + // read a few bytes as ints + int bytesToRead = randomIntBetween(1, length / 2); + for (int i = 0; i < bytesToRead; i++) { + int b = si.read(); + assertEquals(pbr.get(i) & 0xff, b); + } + si.reset(); + + // bulk-read all + si.readFully(targetBuf); + assertArrayEquals(BytesReference.toBytes(pbr), targetBuf); + + // continuing to read should now fail with EOFException + try { + si.readByte(); + fail("expected EOF"); + } catch (EOFException | IndexOutOfBoundsException eof) { + // yay + } + + // try to read more than the stream contains + si.reset(); + expectThrows(IndexOutOfBoundsException.class, () -> + si.readBytes(targetBuf, 0, length * 2)); + } + + public void testStreamInputMarkAndReset() throws IOException { + int length = randomIntBetween(10, scaledRandomIntBetween(PAGE_SIZE * 2, PAGE_SIZE * 20)); + BytesReference pbr = newBytesReference(length); + StreamInput si = pbr.streamInput(); + assertNotNull(si); + + StreamInput wrap = StreamInput.wrap(BytesReference.toBytes(pbr)); + while(wrap.available() > 0) { + if (rarely()) { + wrap.mark(Integer.MAX_VALUE); + si.mark(Integer.MAX_VALUE); + } else if (rarely()) { + wrap.reset(); + si.reset(); + } + assertEquals(si.readByte(), wrap.readByte()); + assertEquals(si.available(), wrap.available()); + } + } + + public void testStreamInputBulkReadWithOffset() throws IOException { + final int length = randomIntBetween(10, scaledRandomIntBetween(PAGE_SIZE * 2, PAGE_SIZE * 20)); + BytesReference pbr = newBytesReference(length); + StreamInput si = pbr.streamInput(); + assertNotNull(si); + + // read a bunch of single bytes one by one + int offset = randomIntBetween(1, length / 2); + for (int i = 0; i < offset; i++) { + assertEquals(si.available(), length - i); + assertEquals(pbr.get(i), si.readByte()); + } + + // now do NOT reset the stream - keep the stream's offset!
+ + // buffer to compare remaining bytes against bulk read + byte[] pbrBytesWithOffset = Arrays.copyOfRange(BytesReference.toBytes(pbr), offset, length); + // randomized target buffer to ensure no stale slots + byte[] targetBytes = new byte[pbrBytesWithOffset.length]; + random().nextBytes(targetBytes); + + // bulk-read all + si.readFully(targetBytes); + assertArrayEquals(pbrBytesWithOffset, targetBytes); + assertEquals(si.available(), 0); + } + + public void testRandomReads() throws IOException { + int length = randomIntBetween(10, scaledRandomIntBetween(PAGE_SIZE * 2, PAGE_SIZE * 20)); + BytesReference pbr = newBytesReference(length); + StreamInput streamInput = pbr.streamInput(); + BytesRefBuilder target = new BytesRefBuilder(); + while (target.length() < pbr.length()) { + switch (randomIntBetween(0, 10)) { + case 6: + case 5: + target.append(new BytesRef(new byte[]{streamInput.readByte()})); + break; + case 4: + case 3: + BytesRef bytesRef = streamInput.readBytesRef(scaledRandomIntBetween(1, pbr.length() - target.length())); + target.append(bytesRef); + break; + default: + byte[] buffer = new byte[scaledRandomIntBetween(1, pbr.length() - target.length())]; + int offset = scaledRandomIntBetween(0, buffer.length - 1); + int read = streamInput.read(buffer, offset, buffer.length - offset); + target.append(new BytesRef(buffer, offset, read)); + break; + } + } + assertEquals(pbr.length(), target.length()); + BytesRef targetBytes = target.get(); + assertArrayEquals(BytesReference.toBytes(pbr), Arrays.copyOfRange(targetBytes.bytes, targetBytes.offset, targetBytes.length)); + } + + public void testSliceStreamInput() throws IOException { + int length = randomIntBetween(10, scaledRandomIntBetween(PAGE_SIZE * 2, PAGE_SIZE * 20)); + BytesReference pbr = newBytesReference(length); + + // test stream input over slice (upper half of original) + int sliceOffset = randomIntBetween(1, length / 2); + int sliceLength = length - sliceOffset; + BytesReference slice = pbr.slice(sliceOffset, sliceLength); + StreamInput sliceInput = slice.streamInput(); + assertEquals(sliceInput.available(), sliceLength); + + // single reads + assertEquals(slice.get(0), sliceInput.readByte()); + assertEquals(slice.get(1), sliceInput.readByte()); + assertEquals(slice.get(2), sliceInput.readByte()); + assertEquals(sliceInput.available(), sliceLength - 3); + + // reset the slice stream for bulk reading + sliceInput.reset(); + assertEquals(sliceInput.available(), sliceLength); + + // bulk read + byte[] sliceBytes = new byte[sliceLength]; + sliceInput.readFully(sliceBytes); + assertEquals(sliceInput.available(), 0); + + // compare slice content with upper half of original + byte[] pbrSliceBytes = Arrays.copyOfRange(BytesReference.toBytes(pbr), sliceOffset, length); + assertArrayEquals(pbrSliceBytes, sliceBytes); + + // compare slice bytes with bytes read from slice via streamInput :D + byte[] sliceToBytes = BytesReference.toBytes(slice); + assertEquals(sliceBytes.length, sliceToBytes.length); + assertArrayEquals(sliceBytes, sliceToBytes); + + sliceInput.reset(); + assertEquals(sliceInput.available(), sliceLength); + byte[] buffer = new byte[sliceLength + scaledRandomIntBetween(1, 100)]; + int offset = scaledRandomIntBetween(0, Math.max(1, buffer.length - sliceLength - 1)); + int read = sliceInput.read(buffer, offset, sliceLength / 2); + assertEquals(sliceInput.available(), sliceLength - read); + sliceInput.read(buffer, offset + read, sliceLength - read); + assertArrayEquals(sliceBytes, Arrays.copyOfRange(buffer, offset, offset + 
sliceLength)); + assertEquals(sliceInput.available(), 0); + } + + public void testWriteToOutputStream() throws IOException { + int length = randomIntBetween(10, PAGE_SIZE * 4); + BytesReference pbr = newBytesReference(length); + BytesStreamOutput out = new BytesStreamOutput(); + pbr.writeTo(out); + assertEquals(pbr.length(), out.size()); + assertArrayEquals(BytesReference.toBytes(pbr), BytesReference.toBytes(out.bytes())); + out.close(); + } + + public void testInputStreamSkip() throws IOException { + int length = randomIntBetween(10, scaledRandomIntBetween(PAGE_SIZE * 2, PAGE_SIZE * 20)); + BytesReference pbr = newBytesReference(length); + final int iters = randomIntBetween(5, 50); + for (int i = 0; i < iters; i++) { + try (StreamInput input = pbr.streamInput()) { + final int offset = randomIntBetween(0, length-1); + assertEquals(offset, input.skip(offset)); + assertEquals(pbr.get(offset), input.readByte()); + final int nextOffset = randomIntBetween(offset, length-2); + assertEquals(nextOffset - offset, input.skip(nextOffset - offset)); + assertEquals(pbr.get(nextOffset+1), input.readByte()); // +1 for the one byte we read above + assertEquals(length - (nextOffset+2), input.skip(Long.MAX_VALUE)); + assertEquals(0, input.skip(randomIntBetween(0, Integer.MAX_VALUE))); + } + } + } + + public void testSliceWriteToOutputStream() throws IOException { + int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 5)); + BytesReference pbr = newBytesReference(length); + int sliceOffset = randomIntBetween(1, length / 2); + int sliceLength = length - sliceOffset; + BytesReference slice = pbr.slice(sliceOffset, sliceLength); + BytesStreamOutput sliceOut = new BytesStreamOutput(sliceLength); + slice.writeTo(sliceOut); + assertEquals(slice.length(), sliceOut.size()); + assertArrayEquals(BytesReference.toBytes(slice), BytesReference.toBytes(sliceOut.bytes())); + sliceOut.close(); + } + + public void testToBytes() throws IOException { + int[] sizes = {0, randomInt(PAGE_SIZE), PAGE_SIZE, randomIntBetween(2, PAGE_SIZE * randomIntBetween(2, 5))}; + for (int i = 0; i < sizes.length; i++) { + BytesReference pbr = newBytesReference(sizes[i]); + byte[] bytes = BytesReference.toBytes(pbr); + assertEquals(sizes[i], bytes.length); + for (int j = 0; j < bytes.length; j++) { + assertEquals(bytes[j], pbr.get(j)); + } + } + } + + public void testToBytesRefSharedPage() throws IOException { + int length = randomIntBetween(10, PAGE_SIZE); + BytesReference pbr = newBytesReference(length); + BytesArray ba = new BytesArray(pbr.toBytesRef()); + BytesArray ba2 = new BytesArray(pbr.toBytesRef()); + assertNotNull(ba); + assertNotNull(ba2); + assertEquals(pbr.length(), ba.length()); + assertEquals(ba.length(), ba2.length()); + // single-page optimization + assertSame(ba.array(), ba2.array()); + } + + public void testToBytesRefMaterializedPages() throws IOException { + // we need a length != (n * pagesize) to avoid page sharing at boundaries + int length = 0; + while ((length % PAGE_SIZE) == 0) { + length = randomIntBetween(PAGE_SIZE, PAGE_SIZE * randomIntBetween(2, 5)); + } + BytesReference pbr = newBytesReference(length); + BytesArray ba = new BytesArray(pbr.toBytesRef()); + BytesArray ba2 = new BytesArray(pbr.toBytesRef()); + assertNotNull(ba); + assertNotNull(ba2); + assertEquals(pbr.length(), ba.length()); + assertEquals(ba.length(), ba2.length()); + } + + public void testCopyBytesRefSharesBytes() throws IOException { + // small PBR which would normally share the first page + int length = randomIntBetween(10, 
PAGE_SIZE); + BytesReference pbr = newBytesReference(length); + BytesArray ba = new BytesArray(pbr.toBytesRef(), true); + BytesArray ba2 = new BytesArray(pbr.toBytesRef(), true); + assertNotNull(ba); + assertNotSame(ba, ba2); + assertNotSame(ba.array(), ba2.array()); + } + + public void testSliceCopyBytesRef() throws IOException { + int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8)); + BytesReference pbr = newBytesReference(length); + int sliceOffset = randomIntBetween(0, pbr.length()); + int sliceLength = randomIntBetween(0, pbr.length() - sliceOffset); + BytesReference slice = pbr.slice(sliceOffset, sliceLength); + + BytesArray ba1 = new BytesArray(slice.toBytesRef(), true); + BytesArray ba2 = new BytesArray(slice.toBytesRef(), true); + assertNotNull(ba1); + assertNotNull(ba2); + assertNotSame(ba1.array(), ba2.array()); + assertArrayEquals(BytesReference.toBytes(slice), ba1.array()); + assertArrayEquals(BytesReference.toBytes(slice), ba2.array()); + assertArrayEquals(ba1.array(), ba2.array()); + } + + public void testEmptyToBytesRefIterator() throws IOException { + BytesReference pbr = newBytesReference(0); + assertNull(pbr.iterator().next()); + } + + public void testIterator() throws IOException { + int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8)); + BytesReference pbr = newBytesReference(length); + BytesRefIterator iterator = pbr.iterator(); + BytesRef ref; + BytesRefBuilder builder = new BytesRefBuilder(); + while((ref = iterator.next()) != null) { + builder.append(ref); + } + assertArrayEquals(BytesReference.toBytes(pbr), BytesRef.deepCopyOf(builder.toBytesRef()).bytes); + } + + public void testSliceIterator() throws IOException { + int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8)); + BytesReference pbr = newBytesReference(length); + int sliceOffset = randomIntBetween(0, pbr.length()); + int sliceLength = randomIntBetween(0, pbr.length() - sliceOffset); + BytesReference slice = pbr.slice(sliceOffset, sliceLength); + BytesRefIterator iterator = slice.iterator(); + BytesRef ref = null; + BytesRefBuilder builder = new BytesRefBuilder(); + while((ref = iterator.next()) != null) { + builder.append(ref); + } + assertArrayEquals(BytesReference.toBytes(slice), BytesRef.deepCopyOf(builder.toBytesRef()).bytes); + } + + public void testIteratorRandom() throws IOException { + int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8)); + BytesReference pbr = newBytesReference(length); + if (randomBoolean()) { + int sliceOffset = randomIntBetween(0, pbr.length()); + int sliceLength = randomIntBetween(0, pbr.length() - sliceOffset); + pbr = pbr.slice(sliceOffset, sliceLength); + } + + if (randomBoolean()) { + pbr = new BytesArray(pbr.toBytesRef()); + } + BytesRefIterator iterator = pbr.iterator(); + BytesRef ref = null; + BytesRefBuilder builder = new BytesRefBuilder(); + while((ref = iterator.next()) != null) { + builder.append(ref); + } + assertArrayEquals(BytesReference.toBytes(pbr), BytesRef.deepCopyOf(builder.toBytesRef()).bytes); + } + + public void testArrayOffset() throws IOException { + int length = randomInt(PAGE_SIZE * randomIntBetween(2, 5)); + BytesReference pbr = newBytesReference(length); + BytesRef singlePageOrNull = getSinglePageOrNull(pbr); + if (singlePageOrNull != null) { + assertEquals(0, singlePageOrNull.offset); + } + } + + public void testSliceArrayOffset() throws IOException { + int length = randomInt(PAGE_SIZE * randomIntBetween(2, 5)); + BytesReference pbr = newBytesReference(length); + int sliceOffset 
= randomIntBetween(0, pbr.length() - 1); // an offset to the end would be len 0 + int sliceLength = randomIntBetween(0, pbr.length() - sliceOffset); + BytesReference slice = pbr.slice(sliceOffset, sliceLength); + BytesRef singlePageOrNull = getSinglePageOrNull(slice); + if (singlePageOrNull != null) { + if (getSinglePageOrNull(pbr) == null) { + // original reference has pages + assertEquals(sliceOffset % PAGE_SIZE, singlePageOrNull.offset); + } else { + // orig ref has no pages ie. BytesArray + assertEquals(sliceOffset, singlePageOrNull.offset); + } + } + } + + public void testToUtf8() throws IOException { + // test empty + BytesReference pbr = newBytesReference(0); + assertEquals("", pbr.utf8ToString()); + // TODO: good way to test? + } + + public void testToBytesRef() throws IOException { + int length = randomIntBetween(0, PAGE_SIZE); + BytesReference pbr = newBytesReference(length); + BytesRef ref = pbr.toBytesRef(); + assertNotNull(ref); + assertEquals(pbr.length(), ref.length); + } + + public void testSliceToBytesRef() throws IOException { + int length = randomIntBetween(0, PAGE_SIZE); + BytesReference pbr = newBytesReference(length); + // get a BytesRef from a slice + int sliceOffset = randomIntBetween(0, pbr.length()); + int sliceLength = randomIntBetween(0, pbr.length() - sliceOffset); + + BytesRef sliceRef = pbr.slice(sliceOffset, sliceLength).toBytesRef(); + + if (sliceLength == 0 && sliceOffset != sliceRef.offset) { + // some impls optimize this to an empty instance then the offset will be 0 + assertEquals(0, sliceRef.offset); + } else { + // note that these are only true if we have <= than a page, otherwise offset/length are shifted + assertEquals(sliceOffset, sliceRef.offset); + } + assertEquals(sliceLength, sliceRef.length); + } + + public void testHashCode() throws IOException { + // empty content must have hash 1 (JDK compat) + BytesReference pbr = newBytesReference(0); + assertEquals(Arrays.hashCode(BytesRef.EMPTY_BYTES), pbr.hashCode()); + + // test with content + pbr = newBytesReference(randomIntBetween(0, PAGE_SIZE * randomIntBetween(2, 5))); + int jdkHash = Arrays.hashCode(BytesReference.toBytes(pbr)); + int pbrHash = pbr.hashCode(); + assertEquals(jdkHash, pbrHash); + + // test hashes of slices + int sliceFrom = randomIntBetween(0, pbr.length()); + int sliceLength = randomIntBetween(0, pbr.length() - sliceFrom); + BytesReference slice = pbr.slice(sliceFrom, sliceLength); + int sliceJdkHash = Arrays.hashCode(BytesReference.toBytes(slice)); + int sliceHash = slice.hashCode(); + assertEquals(sliceJdkHash, sliceHash); + } + + public void testEquals() throws IOException { + BytesReference bytesReference = newBytesReference(randomIntBetween(100, PAGE_SIZE * randomIntBetween(2, 5))); + BytesReference copy = bytesReference.slice(0, bytesReference.length()); + + // get refs & compare + assertEquals(copy, bytesReference); + int sliceFrom = randomIntBetween(0, bytesReference.length()); + int sliceLength = randomIntBetween(0, bytesReference.length() - sliceFrom); + assertEquals(copy.slice(sliceFrom, sliceLength), bytesReference.slice(sliceFrom, sliceLength)); + + BytesRef bytesRef = BytesRef.deepCopyOf(copy.toBytesRef()); + assertEquals(new BytesArray(bytesRef), copy); + + int offsetToFlip = randomIntBetween(0, bytesRef.length - 1); + int value = ~Byte.toUnsignedInt(bytesRef.bytes[bytesRef.offset+offsetToFlip]); + bytesRef.bytes[bytesRef.offset+offsetToFlip] = (byte)value; + assertNotEquals(new BytesArray(bytesRef), copy); + } + + public void testSliceEquals() { + int length = 
randomIntBetween(100, PAGE_SIZE * randomIntBetween(2, 5)); + ByteArray ba1 = bigarrays.newByteArray(length, false); + BytesReference pbr = new PagedBytesReference(bigarrays, ba1, length); + + // test equality of slices + int sliceFrom = randomIntBetween(0, pbr.length()); + int sliceLength = randomIntBetween(0, pbr.length() - sliceFrom); + BytesReference slice1 = pbr.slice(sliceFrom, sliceLength); + BytesReference slice2 = pbr.slice(sliceFrom, sliceLength); + assertArrayEquals(BytesReference.toBytes(slice1), BytesReference.toBytes(slice2)); + + // test a slice with same offset but different length, + // unless randomized testing gave us a 0-length slice. + if (sliceLength > 0) { + BytesReference slice3 = pbr.slice(sliceFrom, sliceLength / 2); + assertFalse(Arrays.equals(BytesReference.toBytes(slice1), BytesReference.toBytes(slice3))); + } + } + + protected abstract BytesReference newBytesReference(int length) throws IOException; + + public void testCompareTo() throws IOException { + final int iters = randomIntBetween(5, 10); + for (int i = 0; i < iters; i++) { + int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8)); + BytesReference bytesReference = newBytesReference(length); + assertTrue(bytesReference.compareTo(new BytesArray("")) > 0); + assertTrue(new BytesArray("").compareTo(bytesReference) < 0); + + + assertEquals(0, bytesReference.compareTo(bytesReference)); + int sliceFrom = randomIntBetween(0, bytesReference.length()); + int sliceLength = randomIntBetween(0, bytesReference.length() - sliceFrom); + BytesReference slice = bytesReference.slice(sliceFrom, sliceLength); + + assertEquals(bytesReference.toBytesRef().compareTo(slice.toBytesRef()), + new BytesArray(bytesReference.toBytesRef(), true).compareTo(new BytesArray(slice.toBytesRef(), true))); + + assertEquals(bytesReference.toBytesRef().compareTo(slice.toBytesRef()), + bytesReference.compareTo(slice)); + assertEquals(slice.toBytesRef().compareTo(bytesReference.toBytesRef()), + slice.compareTo(bytesReference)); + + assertEquals(0, slice.compareTo(new BytesArray(slice.toBytesRef()))); + assertEquals(0, new BytesArray(slice.toBytesRef()).compareTo(slice)); + + final int crazyLength = length + randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8)); + ReleasableBytesStreamOutput crazyStream = new ReleasableBytesStreamOutput(length, bigarrays); + final int offset = randomIntBetween(0, crazyLength - length); + for (int j = 0; j < offset; j++) { + crazyStream.writeByte((byte) random().nextInt(1 << 8)); + } + bytesReference.writeTo(crazyStream); + for (int j = crazyStream.size(); j < crazyLength; j++) { + crazyStream.writeByte((byte) random().nextInt(1 << 8)); + } + PagedBytesReference crazyReference = crazyStream.bytes(); + + assertFalse(crazyReference.compareTo(bytesReference) == 0); + assertEquals(0, crazyReference.slice(offset, length).compareTo( + bytesReference)); + assertEquals(0, bytesReference.compareTo( + crazyReference.slice(offset, length))); + } + } + + public static BytesRef getSinglePageOrNull(BytesReference ref) throws IOException { + if (ref.length() > 0) { + BytesRefIterator iterator = ref.iterator(); + BytesRef next = iterator.next(); + BytesRef retVal = next.clone(); + if (iterator.next() == null) { + return retVal; + } + } else { + return new BytesRef(); + } + return null; + } + + public static int getNumPages(BytesReference ref) throws IOException { + int num = 0; + if (ref.length() > 0) { + BytesRefIterator iterator = ref.iterator(); + while(iterator.next() != null) { + num++; + } + } + return num; 
+ } + + + public void testBasicEquals() { + final int len = randomIntBetween(0, randomBoolean() ? 10: 100000); + final int offset1 = randomInt(5); + final byte[] array1 = new byte[offset1 + len + randomInt(5)]; + random().nextBytes(array1); + final int offset2 = randomInt(offset1); + final byte[] array2 = Arrays.copyOfRange(array1, offset1 - offset2, array1.length); + + final BytesArray b1 = new BytesArray(array1, offset1, len); + final BytesArray b2 = new BytesArray(array2, offset2, len); + assertEquals(b1, b2); + assertEquals(Arrays.hashCode(BytesReference.toBytes(b1)), b1.hashCode()); + assertEquals(Arrays.hashCode(BytesReference.toBytes(b2)), b2.hashCode()); + + // test same instance + assertEquals(b1, b1); + assertEquals(b2, b2); + + if (len > 0) { + // test different length + BytesArray differentLen = new BytesArray(array1, offset1, randomInt(len - 1)); + assertNotEquals(b1, differentLen); + + // test changed bytes + array1[offset1 + randomInt(len - 1)] += 13; + assertNotEquals(b1, b2); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/common/bytes/ByteBufferBytesReference.java b/core/src/test/java/org/elasticsearch/common/bytes/ByteBufferBytesReference.java deleted file mode 100644 index a272b6627e4..00000000000 --- a/core/src/test/java/org/elasticsearch/common/bytes/ByteBufferBytesReference.java +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.bytes; - -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.io.Channels; -import org.elasticsearch.common.io.stream.ByteBufferStreamInput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.jboss.netty.buffer.ChannelBuffer; -import org.jboss.netty.buffer.ChannelBuffers; -import org.jboss.netty.util.CharsetUtil; - -import java.io.IOException; -import java.io.OutputStream; -import java.nio.ByteBuffer; -import java.nio.CharBuffer; -import java.nio.channels.GatheringByteChannel; -import java.nio.charset.CharacterCodingException; -import java.nio.charset.CharsetDecoder; -import java.nio.charset.CoderResult; -import java.nio.charset.StandardCharsets; - -/** - * Note: this is only used by one lone test method. 
- */ -public class ByteBufferBytesReference implements BytesReference { - - private final ByteBuffer buffer; - - public ByteBufferBytesReference(ByteBuffer buffer) { - this.buffer = buffer; - } - - @Override - public byte get(int index) { - return buffer.get(buffer.position() + index); - } - - @Override - public int length() { - return buffer.remaining(); - } - - @Override - public BytesReference slice(int from, int length) { - ByteBuffer dup = buffer.duplicate(); - dup.position(buffer.position() + from); - dup.limit(buffer.position() + from + length); - return new ByteBufferBytesReference(dup); - } - - @Override - public StreamInput streamInput() { - return new ByteBufferStreamInput(buffer); - } - - @Override - public void writeTo(OutputStream os) throws IOException { - if (buffer.hasArray()) { - os.write(buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining()); - } else { - byte[] tmp = new byte[8192]; - ByteBuffer buf = buffer.duplicate(); - while (buf.hasRemaining()) { - buf.get(tmp, 0, Math.min(tmp.length, buf.remaining())); - os.write(tmp); - } - } - } - - @Override - public void writeTo(GatheringByteChannel channel) throws IOException { - Channels.writeToChannel(buffer, channel); - } - - @Override - public byte[] toBytes() { - if (!buffer.hasRemaining()) { - return BytesRef.EMPTY_BYTES; - } - byte[] tmp = new byte[buffer.remaining()]; - buffer.duplicate().get(tmp); - return tmp; - } - - @Override - public BytesArray toBytesArray() { - if (buffer.hasArray()) { - return new BytesArray(buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining()); - } - return new BytesArray(toBytes()); - } - - @Override - public BytesArray copyBytesArray() { - return new BytesArray(toBytes()); - } - - @Override - public ChannelBuffer toChannelBuffer() { - return ChannelBuffers.wrappedBuffer(buffer); - } - - @Override - public boolean hasArray() { - return buffer.hasArray(); - } - - @Override - public byte[] array() { - return buffer.array(); - } - - @Override - public int arrayOffset() { - return buffer.arrayOffset() + buffer.position(); - } - - @Override - public int hashCode() { - return Helper.bytesHashCode(this); - } - - @Override - public boolean equals(Object obj) { - return Helper.bytesEqual(this, (BytesReference) obj); - } - - @Override - public String toUtf8() { - if (!buffer.hasRemaining()) { - return ""; - } - final CharsetDecoder decoder = CharsetUtil.getDecoder(StandardCharsets.UTF_8); - final CharBuffer dst = CharBuffer.allocate( - (int) ((double) buffer.remaining() * decoder.maxCharsPerByte())); - try { - CoderResult cr = decoder.decode(buffer, dst, true); - if (!cr.isUnderflow()) { - cr.throwException(); - } - cr = decoder.flush(dst); - if (!cr.isUnderflow()) { - cr.throwException(); - } - } catch (CharacterCodingException x) { - throw new IllegalStateException(x); - } - return dst.flip().toString(); - } - - @Override - public BytesRef toBytesRef() { - if (buffer.hasArray()) { - return new BytesRef(buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining()); - } - return new BytesRef(toBytes()); - } - - @Override - public BytesRef copyBytesRef() { - return new BytesRef(toBytes()); - } -} diff --git a/core/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java b/core/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java new file mode 100644 index 00000000000..fff030200b7 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java @@ -0,0 +1,59 @@ +/* + * Licensed to Elasticsearch 
under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.common.bytes; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.hamcrest.Matchers; + +import java.io.IOException; + +public class BytesArrayTests extends AbstractBytesReferenceTestCase { + @Override + protected BytesReference newBytesReference(int length) throws IOException { + // we know bytes stream output always creates a paged bytes reference, we use it to create randomized content + final BytesStreamOutput out = new BytesStreamOutput(length); + for (int i = 0; i < length; i++) { + out.writeByte((byte) random().nextInt(1 << 8)); + } + assertEquals(length, out.size()); + BytesArray ref = new BytesArray(out.bytes().toBytesRef()); + assertEquals(length, ref.length()); + assertTrue(ref instanceof BytesArray); + assertThat(ref.length(), Matchers.equalTo(length)); + return ref; + } + + public void testArray() throws IOException { + int[] sizes = {0, randomInt(PAGE_SIZE), PAGE_SIZE, randomIntBetween(2, PAGE_SIZE * randomIntBetween(2, 5))}; + + for (int i = 0; i < sizes.length; i++) { + BytesArray pbr = (BytesArray) newBytesReference(sizes[i]); + byte[] array = pbr.array(); + assertNotNull(array); + assertEquals(sizes[i], array.length); + assertSame(array, pbr.array()); + } + } + + public void testArrayOffset() throws IOException { + int length = randomInt(PAGE_SIZE * randomIntBetween(2, 5)); + BytesArray pbr = (BytesArray) newBytesReference(length); + assertEquals(0, pbr.offset()); + } +} diff --git a/core/src/test/java/org/elasticsearch/common/bytes/BytesReferenceTests.java b/core/src/test/java/org/elasticsearch/common/bytes/BytesReferenceTests.java deleted file mode 100644 index 60f4983dd19..00000000000 --- a/core/src/test/java/org/elasticsearch/common/bytes/BytesReferenceTests.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.common.bytes; - - -import org.elasticsearch.test.ESTestCase; - -import java.util.Arrays; - -public class BytesReferenceTests extends ESTestCase { - - public void testEquals() { - final int len = randomIntBetween(0, randomBoolean() ? 10: 100000); - final int offset1 = randomInt(5); - final byte[] array1 = new byte[offset1 + len + randomInt(5)]; - random().nextBytes(array1); - final int offset2 = randomInt(offset1); - final byte[] array2 = Arrays.copyOfRange(array1, offset1 - offset2, array1.length); - - final BytesArray b1 = new BytesArray(array1, offset1, len); - final BytesArray b2 = new BytesArray(array2, offset2, len); - assertTrue(BytesReference.Helper.bytesEqual(b1, b2)); - assertTrue(BytesReference.Helper.bytesEquals(b1, b2)); - assertEquals(Arrays.hashCode(b1.toBytes()), b1.hashCode()); - assertEquals(BytesReference.Helper.bytesHashCode(b1), BytesReference.Helper.slowHashCode(b2)); - - // test same instance - assertTrue(BytesReference.Helper.bytesEqual(b1, b1)); - assertTrue(BytesReference.Helper.bytesEquals(b1, b1)); - assertEquals(BytesReference.Helper.bytesHashCode(b1), BytesReference.Helper.slowHashCode(b1)); - - if (len > 0) { - // test different length - BytesArray differentLen = new BytesArray(array1, offset1, randomInt(len - 1)); - assertFalse(BytesReference.Helper.bytesEqual(b1, differentLen)); - - // test changed bytes - array1[offset1 + randomInt(len - 1)] += 13; - assertFalse(BytesReference.Helper.bytesEqual(b1, b2)); - assertFalse(BytesReference.Helper.bytesEquals(b1, b2)); - } - } - -} diff --git a/core/src/test/java/org/elasticsearch/common/bytes/CompositeBytesReferenceTests.java b/core/src/test/java/org/elasticsearch/common/bytes/CompositeBytesReferenceTests.java new file mode 100644 index 00000000000..aec957aba68 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/bytes/CompositeBytesReferenceTests.java @@ -0,0 +1,110 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.common.bytes; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.BytesRefIterator; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class CompositeBytesReferenceTests extends AbstractBytesReferenceTestCase { + @Override + protected BytesReference newBytesReference(int length) throws IOException { + // we know bytes stream output always creates a paged bytes reference, we use it to create randomized content + List<BytesReference> referenceList = newRefList(length); + BytesReference ref = new CompositeBytesReference(referenceList.toArray(new BytesReference[0])); + assertEquals(length, ref.length()); + return ref; + } + + private List<BytesReference> newRefList(int length) throws IOException { + List<BytesReference> referenceList = new ArrayList<>(); + for (int i = 0; i < length;) { + int remaining = length-i; + int sliceLength = randomIntBetween(1, remaining); + ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(sliceLength, bigarrays); + for (int j = 0; j < sliceLength; j++) { + out.writeByte((byte) random().nextInt(1 << 8)); + } + assertEquals(sliceLength, out.size()); + referenceList.add(out.bytes()); + i+=sliceLength; + } + return referenceList; + } + + public void testCompositeBuffer() throws IOException { + List<BytesReference> referenceList = newRefList(randomIntBetween(1, PAGE_SIZE * 2)); + BytesReference ref = new CompositeBytesReference(referenceList.toArray(new BytesReference[0])); + BytesRefIterator iterator = ref.iterator(); + BytesRefBuilder builder = new BytesRefBuilder(); + + for (BytesReference reference : referenceList) { + BytesRefIterator innerIter = reference.iterator(); // sometimes we have a paged ref - pull an iter and walk all pages!
+            BytesRef scratch;
+            while ((scratch = innerIter.next()) != null) {
+                BytesRef next = iterator.next();
+                assertNotNull(next);
+                assertEquals(next, scratch);
+                builder.append(next);
+            }
+
+        }
+        assertNull(iterator.next());
+
+        int offset = 0;
+        for (BytesReference reference : referenceList) {
+            assertEquals(reference, ref.slice(offset, reference.length()));
+            int probes = randomIntBetween(Math.min(10, reference.length()), reference.length());
+            for (int i = 0; i < probes; i++) {
+                int index = randomIntBetween(0, reference.length()-1);
+                assertEquals(ref.get(offset + index), reference.get(index));
+            }
+            offset += reference.length();
+        }
+
+        BytesArray array = new BytesArray(builder.toBytesRef());
+        assertEquals(array, ref);
+        assertEquals(array.hashCode(), ref.hashCode());
+
+        BytesStreamOutput output = new BytesStreamOutput();
+        ref.writeTo(output);
+        assertEquals(array, output.bytes());
+    }
+
+    @Override
+    public void testToBytesRefSharedPage() throws IOException {
+        // CompositeBytesReference doesn't share pages
+    }
+
+    @Override
+    public void testSliceArrayOffset() throws IOException {
+        // the assertions in this test only work on no-composite buffers
+    }
+
+    @Override
+    public void testSliceToBytesRef() throws IOException {
+        // CompositeBytesReference shifts offsets
+    }
+}
diff --git a/core/src/test/java/org/elasticsearch/common/bytes/PagedBytesReferenceTests.java b/core/src/test/java/org/elasticsearch/common/bytes/PagedBytesReferenceTests.java
index 9cb633e410e..6ae2b3cf943 100644
--- a/core/src/test/java/org/elasticsearch/common/bytes/PagedBytesReferenceTests.java
+++ b/core/src/test/java/org/elasticsearch/common/bytes/PagedBytesReferenceTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch.common.bytes;
 
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
+import org.apache.lucene.util.BytesRefIterator;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -29,318 +30,35 @@ import org.elasticsearch.common.util.ByteArray;
 import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
 import org.elasticsearch.test.ESTestCase;
 import org.hamcrest.Matchers;
-import org.jboss.netty.buffer.ChannelBuffer;
-import org.junit.After;
-import org.junit.Before;
 
 import java.io.EOFException;
 import java.io.IOException;
-import java.nio.channels.FileChannel;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.StandardOpenOption;
 import java.util.Arrays;
 
-public class PagedBytesReferenceTests extends ESTestCase {
+public class PagedBytesReferenceTests extends AbstractBytesReferenceTestCase {
 
-    private static final int PAGE_SIZE = BigArrays.BYTE_PAGE_SIZE;
-
-    private BigArrays bigarrays;
-
-    @Override
-    @Before
-    public void setUp() throws Exception {
-        super.setUp();
-        bigarrays = new BigArrays(null, new NoneCircuitBreakerService(), false);
-    }
-
-    @Override
-    @After
-    public void tearDown() throws Exception {
-        super.tearDown();
-    }
-
-    public void testGet() {
-        int length = randomIntBetween(1, PAGE_SIZE * 3);
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        int sliceOffset = randomIntBetween(0, length / 2);
-        int sliceLength = Math.max(1, length - sliceOffset - 1);
-        BytesReference slice = pbr.slice(sliceOffset, sliceLength);
-        assertEquals(pbr.get(sliceOffset), slice.get(0));
-        assertEquals(pbr.get(sliceOffset + sliceLength - 1), slice.get(sliceLength - 1));
-    }
-
-    public void testLength() {
-        int[] sizes = {0, randomInt(PAGE_SIZE), PAGE_SIZE, randomInt(PAGE_SIZE * 3)};
-
-        for (int i = 0; i < sizes.length; i++) {
-            BytesReference pbr = getRandomizedPagedBytesReference(sizes[i]);
-            assertEquals(sizes[i], pbr.length());
+    protected BytesReference newBytesReference(int length) throws IOException {
+        // we know bytes stream output always creates a paged bytes reference, we use it to create randomized content
+        ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(length, bigarrays);
+        for (int i = 0; i < length; i++) {
+            out.writeByte((byte) random().nextInt(1 << 8));
         }
+        assertThat(out.size(), Matchers.equalTo(length));
+        BytesReference ref = out.bytes();
+        assertThat(ref.length(), Matchers.equalTo(length));
+        assertThat(ref, Matchers.instanceOf(PagedBytesReference.class));
+        return ref;
     }
 
-    public void testSlice() {
-        int length = randomInt(PAGE_SIZE * 3);
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        int sliceOffset = randomIntBetween(0, length / 2);
-        int sliceLength = Math.max(0, length - sliceOffset - 1);
-        BytesReference slice = pbr.slice(sliceOffset, sliceLength);
-        assertEquals(sliceLength, slice.length());
-
-        if (slice.hasArray()) {
-            assertEquals(sliceOffset, slice.arrayOffset());
-        } else {
-            try {
-                slice.arrayOffset();
-                fail("expected IllegalStateException");
-            } catch (IllegalStateException ise) {
-                // expected
-            }
-        }
-    }
-
-    public void testStreamInput() throws IOException {
-        int length = randomIntBetween(10, scaledRandomIntBetween(PAGE_SIZE * 2, PAGE_SIZE * 20));
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        StreamInput si = pbr.streamInput();
-        assertNotNull(si);
-
-        // read single bytes one by one
-        assertEquals(pbr.get(0), si.readByte());
-        assertEquals(pbr.get(1), si.readByte());
-        assertEquals(pbr.get(2), si.readByte());
-
-        // reset the stream for bulk reading
-        si.reset();
-
-        // buffer for bulk reads
-        byte[] origBuf = new byte[length];
-        random().nextBytes(origBuf);
-        byte[] targetBuf = Arrays.copyOf(origBuf, origBuf.length);
-
-        // bulk-read 0 bytes: must not modify buffer
-        si.readBytes(targetBuf, 0, 0);
-        assertEquals(origBuf[0], targetBuf[0]);
-        si.reset();
-
-        // read a few few bytes as ints
-        int bytesToRead = randomIntBetween(1, length / 2);
-        for (int i = 0; i < bytesToRead; i++) {
-            int b = si.read();
-            assertEquals(pbr.get(i), b);
-        }
-        si.reset();
-
-        // bulk-read all
-        si.readFully(targetBuf);
-        assertArrayEquals(pbr.toBytes(), targetBuf);
-
-        // continuing to read should now fail with EOFException
-        try {
-            si.readByte();
-            fail("expected EOF");
-        } catch (EOFException eof) {
-            // yay
-        }
-
-        // try to read more than the stream contains
-        si.reset();
-        try {
-            si.readBytes(targetBuf, 0, length * 2);
-            fail("expected IndexOutOfBoundsException: le > stream.length");
-        } catch (IndexOutOfBoundsException ioob) {
-            // expected
-        }
-    }
-
-    public void testStreamInputBulkReadWithOffset() throws IOException {
-        final int length = randomIntBetween(10, scaledRandomIntBetween(PAGE_SIZE * 2, PAGE_SIZE * 20));
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        StreamInput si = pbr.streamInput();
-        assertNotNull(si);
-
-        // read a bunch of single bytes one by one
-        int offset = randomIntBetween(1, length / 2);
-        for (int i = 0; i < offset; i++) {
-            assertEquals(si.available(), length - i);
-            assertEquals(pbr.get(i), si.readByte());
-        }
-
-        // now do NOT reset the stream - keep the stream's offset!
-
-        // buffer to compare remaining bytes against bulk read
-        byte[] pbrBytesWithOffset = Arrays.copyOfRange(pbr.toBytes(), offset, length);
-        // randomized target buffer to ensure no stale slots
-        byte[] targetBytes = new byte[pbrBytesWithOffset.length];
-        random().nextBytes(targetBytes);
-
-        // bulk-read all
-        si.readFully(targetBytes);
-        assertArrayEquals(pbrBytesWithOffset, targetBytes);
-        assertEquals(si.available(), 0);
-    }
-
-    public void testRandomReads() throws IOException {
-        int length = randomIntBetween(10, scaledRandomIntBetween(PAGE_SIZE * 2, PAGE_SIZE * 20));
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        StreamInput streamInput = pbr.streamInput();
-        BytesRefBuilder target = new BytesRefBuilder();
-        while (target.length() < pbr.length()) {
-            switch (randomIntBetween(0, 10)) {
-                case 6:
-                case 5:
-                    target.append(new BytesRef(new byte[]{streamInput.readByte()}));
-                    break;
-                case 4:
-                case 3:
-                    BytesRef bytesRef = streamInput.readBytesRef(scaledRandomIntBetween(1, pbr.length() - target.length()));
-                    target.append(bytesRef);
-                    break;
-                default:
-                    byte[] buffer = new byte[scaledRandomIntBetween(1, pbr.length() - target.length())];
-                    int offset = scaledRandomIntBetween(0, buffer.length - 1);
-                    int read = streamInput.read(buffer, offset, buffer.length - offset);
-                    target.append(new BytesRef(buffer, offset, read));
-                    break;
-            }
-        }
-        assertEquals(pbr.length(), target.length());
-        BytesRef targetBytes = target.get();
-        assertArrayEquals(pbr.toBytes(), Arrays.copyOfRange(targetBytes.bytes, targetBytes.offset, targetBytes.length));
-    }
-
-    public void testSliceStreamInput() throws IOException {
-        int length = randomIntBetween(10, scaledRandomIntBetween(PAGE_SIZE * 2, PAGE_SIZE * 20));
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-
-        // test stream input over slice (upper half of original)
-        int sliceOffset = randomIntBetween(1, length / 2);
-        int sliceLength = length - sliceOffset;
-        BytesReference slice = pbr.slice(sliceOffset, sliceLength);
-        StreamInput sliceInput = slice.streamInput();
-        assertEquals(sliceInput.available(), sliceLength);
-
-        // single reads
-        assertEquals(slice.get(0), sliceInput.readByte());
-        assertEquals(slice.get(1), sliceInput.readByte());
-        assertEquals(slice.get(2), sliceInput.readByte());
-        assertEquals(sliceInput.available(), sliceLength - 3);
-
-        // reset the slice stream for bulk reading
-        sliceInput.reset();
-        assertEquals(sliceInput.available(), sliceLength);
-
-        // bulk read
-        byte[] sliceBytes = new byte[sliceLength];
-        sliceInput.readFully(sliceBytes);
-        assertEquals(sliceInput.available(), 0);
-
-        // compare slice content with upper half of original
-        byte[] pbrSliceBytes = Arrays.copyOfRange(pbr.toBytes(), sliceOffset, length);
-        assertArrayEquals(pbrSliceBytes, sliceBytes);
-
-        // compare slice bytes with bytes read from slice via streamInput :D
-        byte[] sliceToBytes = slice.toBytes();
-        assertEquals(sliceBytes.length, sliceToBytes.length);
-        assertArrayEquals(sliceBytes, sliceToBytes);
-
-        sliceInput.reset();
-        assertEquals(sliceInput.available(), sliceLength);
-        byte[] buffer = new byte[sliceLength + scaledRandomIntBetween(1, 100)];
-        int offset = scaledRandomIntBetween(0, Math.max(1, buffer.length - sliceLength - 1));
-        int read = sliceInput.read(buffer, offset, sliceLength / 2);
-        assertEquals(sliceInput.available(), sliceLength - read);
-        sliceInput.read(buffer, offset + read, sliceLength);
-        assertArrayEquals(sliceBytes, Arrays.copyOfRange(buffer, offset, offset + sliceLength));
-        assertEquals(sliceInput.available(), 0);
-    }
-
-    public void testWriteToOutputStream() throws IOException {
-        int length = randomIntBetween(10, PAGE_SIZE * 4);
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        BytesStreamOutput out = new BytesStreamOutput();
-        pbr.writeTo(out);
-        assertEquals(pbr.length(), out.size());
-        assertArrayEquals(pbr.toBytes(), out.bytes().toBytes());
-        out.close();
-    }
-
-    public void testWriteToChannel() throws IOException {
-        int length = randomIntBetween(10, PAGE_SIZE * 4);
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        Path tFile = createTempFile();
-        try (FileChannel channel = FileChannel.open(tFile, StandardOpenOption.WRITE)) {
-            pbr.writeTo(channel);
-            assertEquals(pbr.length(), channel.position());
-        }
-        assertArrayEquals(pbr.toBytes(), Files.readAllBytes(tFile));
-    }
-
-    public void testSliceWriteToOutputStream() throws IOException {
-        int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 5));
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        int sliceOffset = randomIntBetween(1, length / 2);
-        int sliceLength = length - sliceOffset;
-        BytesReference slice = pbr.slice(sliceOffset, sliceLength);
-        BytesStreamOutput sliceOut = new BytesStreamOutput(sliceLength);
-        slice.writeTo(sliceOut);
-        assertEquals(slice.length(), sliceOut.size());
-        assertArrayEquals(slice.toBytes(), sliceOut.bytes().toBytes());
-        sliceOut.close();
-    }
-
-    public void testSliceWriteToChannel() throws IOException {
-        int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 5));
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        int sliceOffset = randomIntBetween(1, length / 2);
-        int sliceLength = length - sliceOffset;
-        BytesReference slice = pbr.slice(sliceOffset, sliceLength);
-        Path tFile = createTempFile();
-        try (FileChannel channel = FileChannel.open(tFile, StandardOpenOption.WRITE)) {
-            slice.writeTo(channel);
-            assertEquals(slice.length(), channel.position());
-        }
-        assertArrayEquals(slice.toBytes(), Files.readAllBytes(tFile));
-    }
-
-    public void testToBytes() {
-        int[] sizes = {0, randomInt(PAGE_SIZE), PAGE_SIZE, randomIntBetween(2, PAGE_SIZE * randomIntBetween(2, 5))};
-
-        for (int i = 0; i < sizes.length; i++) {
-            BytesReference pbr = getRandomizedPagedBytesReference(sizes[i]);
-            byte[] bytes = pbr.toBytes();
-            assertEquals(sizes[i], bytes.length);
-            // verify that toBytes() is cheap for small payloads
-            if (sizes[i] <= PAGE_SIZE) {
-                assertSame(bytes, pbr.toBytes());
-            } else {
-                assertNotSame(bytes, pbr.toBytes());
-            }
-        }
-    }
-
-    public void testToBytesArraySharedPage() {
-        int length = randomIntBetween(10, PAGE_SIZE);
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        BytesArray ba = pbr.toBytesArray();
-        BytesArray ba2 = pbr.toBytesArray();
-        assertNotNull(ba);
-        assertNotNull(ba2);
-        assertEquals(pbr.length(), ba.length());
-        assertEquals(ba.length(), ba2.length());
-        // single-page optimization
-        assertSame(ba.array(), ba2.array());
-    }
-
-    public void testToBytesArrayMaterializedPages() {
+    public void testToBytesRefMaterializedPages() throws IOException {
         // we need a length != (n * pagesize) to avoid page sharing at boundaries
         int length = 0;
         while ((length % PAGE_SIZE) == 0) {
            length = randomIntBetween(PAGE_SIZE, PAGE_SIZE * randomIntBetween(2, 5));
         }
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        BytesArray ba = pbr.toBytesArray();
-        BytesArray ba2 = pbr.toBytesArray();
+        BytesReference pbr = newBytesReference(length);
+        BytesArray ba = new BytesArray(pbr.toBytesRef());
+        BytesArray ba2 = new BytesArray(pbr.toBytesRef());
         assertNotNull(ba);
         assertNotNull(ba2);
         assertEquals(pbr.length(), ba.length());
@@ -349,181 +67,49 @@ public class PagedBytesReferenceTests extends ESTestCase {
         assertNotSame(ba.array(), ba2.array());
     }
 
-    public void testCopyBytesArray() {
-        // small PBR which would normally share the first page
-        int length = randomIntBetween(10, PAGE_SIZE);
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        BytesArray ba = pbr.copyBytesArray();
-        BytesArray ba2 = pbr.copyBytesArray();
-        assertNotNull(ba);
-        assertNotSame(ba, ba2);
-        assertNotSame(ba.array(), ba2.array());
-    }
-
-    public void testSliceCopyBytesArray() {
-        int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8));
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        int sliceOffset = randomIntBetween(0, pbr.length());
-        int sliceLength = randomIntBetween(pbr.length() - sliceOffset, pbr.length() - sliceOffset);
-        BytesReference slice = pbr.slice(sliceOffset, sliceLength);
-
-        BytesArray ba1 = slice.copyBytesArray();
-        BytesArray ba2 = slice.copyBytesArray();
-        assertNotNull(ba1);
-        assertNotNull(ba2);
-        assertNotSame(ba1.array(), ba2.array());
-        assertArrayEquals(slice.toBytes(), ba1.array());
-        assertArrayEquals(slice.toBytes(), ba2.array());
-        assertArrayEquals(ba1.array(), ba2.array());
-    }
-
-    public void testToChannelBuffer() {
-        int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8));
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        ChannelBuffer cb = pbr.toChannelBuffer();
-        assertNotNull(cb);
-        byte[] bufferBytes = new byte[length];
-        cb.getBytes(0, bufferBytes);
-        assertArrayEquals(pbr.toBytes(), bufferBytes);
-    }
-
-    public void testEmptyToChannelBuffer() {
-        BytesReference pbr = getRandomizedPagedBytesReference(0);
-        ChannelBuffer cb = pbr.toChannelBuffer();
-        assertNotNull(cb);
-        assertEquals(0, pbr.length());
-        assertEquals(0, cb.capacity());
-    }
-
-    public void testSliceToChannelBuffer() {
-        int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8));
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        int sliceOffset = randomIntBetween(0, pbr.length());
-        int sliceLength = randomIntBetween(pbr.length() - sliceOffset, pbr.length() - sliceOffset);
-        BytesReference slice = pbr.slice(sliceOffset, sliceLength);
-        ChannelBuffer cbSlice = slice.toChannelBuffer();
-        assertNotNull(cbSlice);
-        byte[] sliceBufferBytes = new byte[sliceLength];
-        cbSlice.getBytes(0, sliceBufferBytes);
-        assertArrayEquals(slice.toBytes(), sliceBufferBytes);
-    }
-
-    public void testHasArray() {
-        int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(1, 3));
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        // must return true for <= pagesize
-        assertEquals(length <= PAGE_SIZE, pbr.hasArray());
-    }
-
-    public void testArray() {
+    public void testSinglePage() throws IOException {
         int[] sizes = {0, randomInt(PAGE_SIZE), PAGE_SIZE, randomIntBetween(2, PAGE_SIZE * randomIntBetween(2, 5))};
 
         for (int i = 0; i < sizes.length; i++) {
-            BytesReference pbr = getRandomizedPagedBytesReference(sizes[i]);
+            BytesReference pbr = newBytesReference(sizes[i]);
             // verify that array() is cheap for small payloads
             if (sizes[i] <= PAGE_SIZE) {
-                byte[] array = pbr.array();
+                BytesRef page = getSinglePageOrNull(pbr);
+                assertNotNull(page);
+                byte[] array = page.bytes;
                 assertNotNull(array);
                 assertEquals(sizes[i], array.length);
-                assertSame(array, pbr.array());
+                assertSame(array, page.bytes);
             } else {
-                try {
-                    pbr.array();
-                    fail("expected IllegalStateException");
-                } catch (IllegalStateException isx) {
-                    // expected
+                BytesRef page = getSinglePageOrNull(pbr);
+                if (pbr.length() > 0) {
+                    assertNull(page);
                 }
             }
         }
     }
 
-    public void testArrayOffset() {
-        int length = randomInt(PAGE_SIZE * randomIntBetween(2, 5));
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        if (pbr.hasArray()) {
-            assertEquals(0, pbr.arrayOffset());
-        } else {
-            try {
-                pbr.arrayOffset();
-                fail("expected IllegalStateException");
-            } catch (IllegalStateException ise) {
-                // expected
+    public void testToBytes() throws IOException {
+        int[] sizes = {0, randomInt(PAGE_SIZE), PAGE_SIZE, randomIntBetween(2, PAGE_SIZE * randomIntBetween(2, 5))};
+
+        for (int i = 0; i < sizes.length; i++) {
+            BytesReference pbr = newBytesReference(sizes[i]);
+            byte[] bytes = BytesReference.toBytes(pbr);
+            assertEquals(sizes[i], bytes.length);
+            // verify that toBytes() is cheap for small payloads
+            if (sizes[i] <= PAGE_SIZE) {
+                assertSame(bytes, BytesReference.toBytes(pbr));
+            } else {
+                assertNotSame(bytes, BytesReference.toBytes(pbr));
            }
         }
     }
 
-    public void testSliceArrayOffset() {
-        int length = randomInt(PAGE_SIZE * randomIntBetween(2, 5));
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        int sliceOffset = randomIntBetween(0, pbr.length());
-        int sliceLength = randomIntBetween(pbr.length() - sliceOffset, pbr.length() - sliceOffset);
-        BytesReference slice = pbr.slice(sliceOffset, sliceLength);
-        if (slice.hasArray()) {
-            assertEquals(sliceOffset, slice.arrayOffset());
-        } else {
-            try {
-                slice.arrayOffset();
-                fail("expected IllegalStateException");
-            } catch (IllegalStateException ise) {
-                // expected
-            }
-        }
-    }
-
-    public void testToUtf8() throws IOException {
-        // test empty
-        BytesReference pbr = getRandomizedPagedBytesReference(0);
-        assertEquals("", pbr.toUtf8());
-        // TODO: good way to test?
-    }
-
-    public void testToBytesRef() {
-        int length = randomIntBetween(0, PAGE_SIZE);
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        BytesRef ref = pbr.toBytesRef();
-        assertNotNull(ref);
-        assertEquals(pbr.arrayOffset(), ref.offset);
-        assertEquals(pbr.length(), ref.length);
-    }
-
-    public void testSliceToBytesRef() {
-        int length = randomIntBetween(0, PAGE_SIZE);
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        // get a BytesRef from a slice
-        int sliceOffset = randomIntBetween(0, pbr.length());
-        int sliceLength = randomIntBetween(pbr.length() - sliceOffset, pbr.length() - sliceOffset);
-        BytesRef sliceRef = pbr.slice(sliceOffset, sliceLength).toBytesRef();
-        // note that these are only true if we have <= than a page, otherwise offset/length are shifted
-        assertEquals(sliceOffset, sliceRef.offset);
-        assertEquals(sliceLength, sliceRef.length);
-    }
-
-    public void testCopyBytesRef() {
-        int length = randomIntBetween(0, PAGE_SIZE * randomIntBetween(2, 5));
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        BytesRef ref = pbr.copyBytesRef();
-        assertNotNull(ref);
-        assertEquals(pbr.length(), ref.length);
-    }
-
-    public void testHashCode() {
-        // empty content must have hash 1 (JDK compat)
-        BytesReference pbr = getRandomizedPagedBytesReference(0);
-        assertEquals(Arrays.hashCode(BytesRef.EMPTY_BYTES), pbr.hashCode());
-
-        // test with content
-        pbr = getRandomizedPagedBytesReference(randomIntBetween(0, PAGE_SIZE * randomIntBetween(2, 5)));
-        int jdkHash = Arrays.hashCode(pbr.toBytes());
-        int pbrHash = pbr.hashCode();
-        assertEquals(jdkHash, pbrHash);
-
-        // test hashes of slices
-        int sliceFrom = randomIntBetween(0, pbr.length());
-        int sliceLength = randomIntBetween(pbr.length() - sliceFrom, pbr.length() - sliceFrom);
-        BytesReference slice = pbr.slice(sliceFrom, sliceLength);
-        int sliceJdkHash = Arrays.hashCode(slice.toBytes());
-        int sliceHash = slice.hashCode();
-        assertEquals(sliceJdkHash, sliceHash);
+    public void testHasSinglePage() throws IOException {
+        int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(1, 3));
+        BytesReference pbr = newBytesReference(length);
+        // must return true for <= pagesize
+        assertEquals(length <= PAGE_SIZE, getNumPages(pbr) == 1);
     }
 
     public void testEquals() {
@@ -540,50 +126,10 @@ public class PagedBytesReferenceTests extends ESTestCase {
         BytesReference pbr = new PagedBytesReference(bigarrays, ba1, length);
         BytesReference pbr2 = new PagedBytesReference(bigarrays, ba2, length);
         assertEquals(pbr, pbr2);
-    }
-
-    public void testEqualsPeerClass() {
-        int length = randomIntBetween(100, PAGE_SIZE * randomIntBetween(2, 5));
-        BytesReference pbr = getRandomizedPagedBytesReference(length);
-        BytesReference ba = new BytesArray(pbr.toBytes());
-        assertEquals(pbr, ba);
-    }
-
-    public void testSliceEquals() {
-        int length = randomIntBetween(100, PAGE_SIZE * randomIntBetween(2, 5));
-        ByteArray ba1 = bigarrays.newByteArray(length, false);
-        BytesReference pbr = new PagedBytesReference(bigarrays, ba1, length);
-
-        // test equality of slices
-        int sliceFrom = randomIntBetween(0, pbr.length());
-        int sliceLength = randomIntBetween(pbr.length() - sliceFrom, pbr.length() - sliceFrom);
-        BytesReference slice1 = pbr.slice(sliceFrom, sliceLength);
-        BytesReference slice2 = pbr.slice(sliceFrom, sliceLength);
-        assertArrayEquals(slice1.toBytes(), slice2.toBytes());
-
-        // test a slice with same offset but different length,
-        // unless randomized testing gave us a 0-length slice.
-        if (sliceLength > 0) {
-            BytesReference slice3 = pbr.slice(sliceFrom, sliceLength / 2);
-            assertFalse(Arrays.equals(slice1.toBytes(), slice3.toBytes()));
-        }
-    }
-
-    private BytesReference getRandomizedPagedBytesReference(int length) {
-        // we know bytes stream output always creates a paged bytes reference, we use it to create randomized content
-        ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(length, bigarrays);
-        try {
-            for (int i = 0; i < length; i++) {
-                out.writeByte((byte) random().nextInt(1 << 8));
-            }
-        } catch (IOException e) {
-            fail("should not happen " + e.getMessage());
-        }
-        assertThat(out.size(), Matchers.equalTo(length));
-        BytesReference ref = out.bytes();
-        assertThat(ref.length(), Matchers.equalTo(length));
-        assertThat(ref, Matchers.instanceOf(PagedBytesReference.class));
-        return ref;
+        int offsetToFlip = randomIntBetween(0, length - 1);
+        int value = ~Byte.toUnsignedInt(ba1.get(offsetToFlip));
+        ba2.set(offsetToFlip, (byte)value);
+        assertNotEquals(pbr, pbr2);
     }
 }
diff --git a/core/src/test/java/org/elasticsearch/common/compress/AbstractCompressedStreamTestCase.java b/core/src/test/java/org/elasticsearch/common/compress/DeflateCompressTests.java
similarity index 98%
rename from core/src/test/java/org/elasticsearch/common/compress/AbstractCompressedStreamTestCase.java
rename to core/src/test/java/org/elasticsearch/common/compress/DeflateCompressTests.java
index 0e94f6eaf80..33d11aa23d8 100644
--- a/core/src/test/java/org/elasticsearch/common/compress/AbstractCompressedStreamTestCase.java
+++ b/core/src/test/java/org/elasticsearch/common/compress/DeflateCompressTests.java
@@ -37,13 +37,9 @@ import java.util.concurrent.CountDownLatch;
 /**
  * Test streaming compression (e.g. used for recovery)
  */
-public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
+public class DeflateCompressTests extends ESTestCase {
 
-    private final Compressor compressor;
-
-    protected AbstractCompressedStreamTestCase(Compressor compressor) {
-        this.compressor = compressor;
-    }
+    private final Compressor compressor = new DeflateCompressor();
 
     public void testRandom() throws IOException {
         Random r = random();
diff --git a/core/src/test/java/org/elasticsearch/common/compress/AbstractCompressedXContentTestCase.java b/core/src/test/java/org/elasticsearch/common/compress/DeflateCompressedXContentTests.java
similarity index 60%
rename from core/src/test/java/org/elasticsearch/common/compress/AbstractCompressedXContentTestCase.java
rename to core/src/test/java/org/elasticsearch/common/compress/DeflateCompressedXContentTests.java
index d1c862f8a69..0ce95077965 100644
--- a/core/src/test/java/org/elasticsearch/common/compress/AbstractCompressedXContentTestCase.java
+++ b/core/src/test/java/org/elasticsearch/common/compress/DeflateCompressedXContentTests.java
@@ -35,13 +35,9 @@ import static org.hamcrest.Matchers.not;
 /**
  *
  */
-public abstract class AbstractCompressedXContentTestCase extends ESTestCase {
+public class DeflateCompressedXContentTests extends ESTestCase {
 
-    private final Compressor compressor;
-
-    protected AbstractCompressedXContentTestCase(Compressor compressor) {
-        this.compressor = compressor;
-    }
+    private final Compressor compressor = new DeflateCompressor();
 
     private void assertEquals(CompressedXContent s1, CompressedXContent s2) {
         Assert.assertEquals(s1, s2);
@@ -50,38 +46,26 @@ public abstract class AbstractCompressedXContentTestCase extends ESTestCase {
     }
 
     public void simpleTests() throws IOException {
-        Compressor defaultCompressor = CompressorFactory.defaultCompressor();
-        try {
-            CompressorFactory.setDefaultCompressor(compressor);
-            String str = "---\nf:this is a simple string";
-            CompressedXContent cstr = new CompressedXContent(str);
-            assertThat(cstr.string(), equalTo(str));
-            assertThat(new CompressedXContent(str), equalTo(cstr));
+        String str = "---\nf:this is a simple string";
+        CompressedXContent cstr = new CompressedXContent(str);
+        assertThat(cstr.string(), equalTo(str));
+        assertThat(new CompressedXContent(str), equalTo(cstr));
 
-            String str2 = "---\nf:this is a simple string 2";
-            CompressedXContent cstr2 = new CompressedXContent(str2);
-            assertThat(cstr2.string(), not(equalTo(str)));
-            assertThat(new CompressedXContent(str2), not(equalTo(cstr)));
-            assertEquals(new CompressedXContent(str2), cstr2);
-        } finally {
-            CompressorFactory.setDefaultCompressor(defaultCompressor);
-        }
+        String str2 = "---\nf:this is a simple string 2";
+        CompressedXContent cstr2 = new CompressedXContent(str2);
+        assertThat(cstr2.string(), not(equalTo(str)));
+        assertThat(new CompressedXContent(str2), not(equalTo(cstr)));
+        assertEquals(new CompressedXContent(str2), cstr2);
     }
 
     public void testRandom() throws IOException {
-        Compressor defaultCompressor = CompressorFactory.defaultCompressor();
-        try {
-            CompressorFactory.setDefaultCompressor(compressor);
-            Random r = random();
-            for (int i = 0; i < 1000; i++) {
-                String string = TestUtil.randomUnicodeString(r, 10000);
-                // hack to make it detected as YAML
-                string = "---\n" + string;
-                CompressedXContent compressedXContent = new CompressedXContent(string);
-                assertThat(compressedXContent.string(), equalTo(string));
-            }
-        } finally {
-            CompressorFactory.setDefaultCompressor(defaultCompressor);
+        Random r = random();
+        for (int i = 0; i < 1000; i++) {
+            String string = TestUtil.randomUnicodeString(r, 10000);
+            // hack to make it detected as YAML
+            string = "---\n" + string;
+            CompressedXContent compressedXContent = new CompressedXContent(string);
+            assertThat(compressedXContent.string(), equalTo(string));
         }
     }
 
@@ -107,8 +91,8 @@ public abstract class AbstractCompressedXContentTestCase extends ESTestCase {
         // of different size are being used
         assertFalse(b1.equals(b2));
         // we used the compressed representation directly and did not recompress
-        assertArrayEquals(b1.toBytes(), new CompressedXContent(b1).compressed());
-        assertArrayEquals(b2.toBytes(), new CompressedXContent(b2).compressed());
+        assertArrayEquals(BytesReference.toBytes(b1), new CompressedXContent(b1).compressed());
+        assertArrayEquals(BytesReference.toBytes(b2), new CompressedXContent(b2).compressed());
         // but compressedstring instances are still equal
         assertEquals(new CompressedXContent(b1), new CompressedXContent(b2));
     }
diff --git a/core/src/test/java/org/elasticsearch/common/compress/deflate/DeflateCompressedStreamTests.java b/core/src/test/java/org/elasticsearch/common/compress/deflate/DeflateCompressedStreamTests.java
deleted file mode 100644
index a6d33585dbc..00000000000
--- a/core/src/test/java/org/elasticsearch/common/compress/deflate/DeflateCompressedStreamTests.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.compress.deflate;
-
-import org.elasticsearch.common.compress.AbstractCompressedStreamTestCase;
-
-public class DeflateCompressedStreamTests extends AbstractCompressedStreamTestCase {
-
-    public DeflateCompressedStreamTests() {
-        super(new DeflateCompressor());
-    }
-
-}
diff --git a/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java b/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java
index 407c9790dbe..416299f8e7e 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java
@@ -46,7 +46,7 @@ public class GeoDistanceTests extends ESTestCase {
         GeoDistance geoDistance = randomFrom(GeoDistance.PLANE, GeoDistance.FACTOR, GeoDistance.ARC, GeoDistance.SLOPPY_ARC);
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             geoDistance.writeTo(out);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {;
+            try (StreamInput in = out.bytes().streamInput()) {;
                 GeoDistance copy = GeoDistance.readFromStream(in);
                 assertEquals(copy.toString() + " vs. " + geoDistance.toString(), copy, geoDistance);
             }
@@ -60,7 +60,7 @@ public class GeoDistanceTests extends ESTestCase {
             } else {
                 out.writeVInt(randomIntBetween(Integer.MIN_VALUE, -1));
             }
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 GeoDistance.readFromStream(in);
             } catch (IOException e) {
                 assertThat(e.getMessage(), containsString("Unknown GeoDistance ordinal ["));
diff --git a/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java b/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java
index 566d2148cae..76376a4d30d 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java
@@ -56,7 +56,7 @@ import static org.elasticsearch.common.geo.builders.ShapeBuilder.SPATIAL_CONTEXT
  */
 public class GeoJSONShapeParserTests extends ESTestCase {
 
-    private final static GeometryFactory GEOMETRY_FACTORY = SPATIAL_CONTEXT.getGeometryFactory();
+    private static final GeometryFactory GEOMETRY_FACTORY = SPATIAL_CONTEXT.getGeometryFactory();
 
     public void testParse_simplePoint() throws IOException {
         String pointGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Point")
diff --git a/core/src/test/java/org/elasticsearch/common/geo/ShapeRelationTests.java b/core/src/test/java/org/elasticsearch/common/geo/ShapeRelationTests.java
index 6ee6a4fff83..e4eaa17874c 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/ShapeRelationTests.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/ShapeRelationTests.java
@@ -39,21 +39,21 @@ public class ShapeRelationTests extends ESTestCase {
     public void testwriteTo() throws Exception {
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             ShapeRelation.INTERSECTS.writeTo(out);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(in.readVInt(), equalTo(0));
             }
         }
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             ShapeRelation.DISJOINT.writeTo(out);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(in.readVInt(), equalTo(1));
             }
         }
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             ShapeRelation.WITHIN.writeTo(out);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(in.readVInt(), equalTo(2));
             }
         }
@@ -62,19 +62,19 @@ public class ShapeRelationTests extends ESTestCase {
     public void testReadFrom() throws Exception {
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.writeVInt(0);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(ShapeRelation.readFromStream(in), equalTo(ShapeRelation.INTERSECTS));
             }
         }
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.writeVInt(1);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(ShapeRelation.readFromStream(in), equalTo(ShapeRelation.DISJOINT));
             }
         }
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.writeVInt(2);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(ShapeRelation.readFromStream(in), equalTo(ShapeRelation.WITHIN));
             }
         }
@@ -83,7 +83,7 @@ public class ShapeRelationTests extends ESTestCase {
     public void testInvalidReadFrom() throws Exception {
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.writeVInt(randomIntBetween(3, Integer.MAX_VALUE));
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 ShapeRelation.readFromStream(in);
                 fail("Expected IOException");
             } catch(IOException e) {
diff --git a/core/src/test/java/org/elasticsearch/common/geo/SpatialStrategyTests.java b/core/src/test/java/org/elasticsearch/common/geo/SpatialStrategyTests.java
index c2f29e6ecd7..b6eae97932f 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/SpatialStrategyTests.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/SpatialStrategyTests.java
@@ -38,14 +38,14 @@ public class SpatialStrategyTests extends ESTestCase {
     public void testwriteTo() throws Exception {
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             SpatialStrategy.TERM.writeTo(out);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(in.readVInt(), equalTo(0));
             }
         }
 
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             SpatialStrategy.RECURSIVE.writeTo(out);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(in.readVInt(), equalTo(1));
             }
         }
@@ -54,13 +54,13 @@ public class SpatialStrategyTests extends ESTestCase {
     public void testReadFrom() throws Exception {
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.writeVInt(0);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(SpatialStrategy.readFromStream(in), equalTo(SpatialStrategy.TERM));
             }
         }
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.writeVInt(1);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(SpatialStrategy.readFromStream(in), equalTo(SpatialStrategy.RECURSIVE));
             }
         }
@@ -69,7 +69,7 @@ public class SpatialStrategyTests extends ESTestCase {
     public void testInvalidReadFrom() throws Exception {
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.writeVInt(randomIntBetween(2, Integer.MAX_VALUE));
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 SpatialStrategy.readFromStream(in);
                 fail("Expected IOException");
             } catch(IOException e) {
diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java
index 9cbd4bb769d..4003a96e26f 100644
--- a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java
+++ b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java
@@ -137,7 +137,7 @@ public abstract class AbstractShapeBuilderTestCase exte
     static ShapeBuilder copyShape(ShapeBuilder original) throws IOException {
         try (BytesStreamOutput output = new BytesStreamOutput()) {
             original.writeTo(output);
-            try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
+            try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) {
                 return namedWriteableRegistry.getReader(ShapeBuilder.class, original.getWriteableName()).read(in);
             }
         }
diff --git a/core/src/test/java/org/elasticsearch/common/io/StreamsTests.java b/core/src/test/java/org/elasticsearch/common/io/StreamsTests.java
index 5c6c1e1789b..76b52c08a85 100644
--- a/core/src/test/java/org/elasticsearch/common/io/StreamsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/io/StreamsTests.java
@@ -84,7 +84,7 @@ public class StreamsTests extends ESTestCase {
         byte stuff[] = new byte[] { 0, 1, 2, 3 };
         BytesRef stuffRef = new BytesRef(stuff, 2, 2);
         BytesArray stuffArray = new BytesArray(stuffRef);
-        StreamInput input = StreamInput.wrap(stuffArray);
+        StreamInput input = stuffArray.streamInput();
         assertEquals(2, input.read());
         assertEquals(3, input.read());
         assertEquals(-1, input.read());
diff --git a/core/src/test/java/org/elasticsearch/common/io/stream/AbstractWriteableEnumTestCase.java b/core/src/test/java/org/elasticsearch/common/io/stream/AbstractWriteableEnumTestCase.java
index a4d15173a7c..dc57b0c70d4 100644
--- a/core/src/test/java/org/elasticsearch/common/io/stream/AbstractWriteableEnumTestCase.java
+++ b/core/src/test/java/org/elasticsearch/common/io/stream/AbstractWriteableEnumTestCase.java
@@ -60,7 +60,7 @@ public abstract class AbstractWriteableEnumTestCase extends ESTestCase {
     protected static void assertWriteToStream(final Writeable writeableEnum, final int ordinal) throws IOException {
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             writeableEnum.writeTo(out);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(in.readVInt(), equalTo(ordinal));
             }
         }
@@ -70,7 +70,7 @@ public abstract class AbstractWriteableEnumTestCase extends ESTestCase {
     protected void assertReadFromStream(final int ordinal, final Writeable expected) throws IOException {
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.writeVInt(ordinal);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(reader.read(in), equalTo(expected));
             }
         }
diff --git a/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java b/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java
index 9fcbb708156..94f07369770 100644
--- a/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch.common.io.stream;
 
 import org.apache.lucene.util.Constants;
 import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.lucene.BytesRefs;
 import org.elasticsearch.common.util.BigArrays;
@@ -48,7 +49,7 @@ public class BytesStreamsTests extends ESTestCase {
 
         // test empty stream to array
         assertEquals(0, out.size());
-        assertEquals(0, out.bytes().toBytes().length);
+        assertEquals(0, out.bytes().length());
 
         out.close();
     }
@@ -63,7 +64,7 @@ public class BytesStreamsTests extends ESTestCase {
         // write single byte
         out.writeByte(expectedData[0]);
         assertEquals(expectedSize, out.size());
-        assertArrayEquals(expectedData, out.bytes().toBytes());
+        assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes()));
 
         out.close();
     }
@@ -80,7 +81,7 @@ public class BytesStreamsTests extends ESTestCase {
         }
 
         assertEquals(expectedSize, out.size());
-        assertArrayEquals(expectedData, out.bytes().toBytes());
+        assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes()));
 
         out.close();
     }
@@ -108,14 +109,14 @@ public class BytesStreamsTests extends ESTestCase {
         byte[] expectedData = randomizedByteArrayWithSize(expectedSize);
         out.writeBytes(expectedData);
         assertEquals(expectedSize, out.size());
-        assertArrayEquals(expectedData, out.bytes().toBytes());
+        assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes()));
 
         // bulk-write again with actual bytes
         expectedSize = 10;
         expectedData = randomizedByteArrayWithSize(expectedSize);
         out.writeBytes(expectedData);
         assertEquals(expectedSize, out.size());
-        assertArrayEquals(expectedData, out.bytes().toBytes());
+        assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes()));
 
         out.close();
     }
@@ -130,7 +131,7 @@ public class BytesStreamsTests extends ESTestCase {
         out.writeBytes(expectedData);
 
         assertEquals(expectedSize, out.size());
-        assertArrayEquals(expectedData, out.bytes().toBytes());
+        assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes()));
 
         out.close();
     }
@@ -149,7 +150,7 @@ public class BytesStreamsTests extends ESTestCase {
         // now write the rest - more than fits into the remaining first page
         out.writeBytes(expectedData, initialOffset, additionalLength);
         assertEquals(expectedData.length, out.size());
-        assertArrayEquals(expectedData, out.bytes().toBytes());
+        assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes()));
 
         out.close();
     }
@@ -168,7 +169,7 @@ public class BytesStreamsTests extends ESTestCase {
         // ie. we cross over into a third
         out.writeBytes(expectedData, initialOffset, additionalLength);
         assertEquals(expectedData.length, out.size());
-        assertArrayEquals(expectedData, out.bytes().toBytes());
+        assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes()));
 
         out.close();
     }
@@ -185,7 +186,7 @@ public class BytesStreamsTests extends ESTestCase {
         }
 
         assertEquals(expectedSize, out.size());
-        assertArrayEquals(expectedData, out.bytes().toBytes());
+        assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes()));
 
         out.close();
     }
@@ -202,7 +203,7 @@ public class BytesStreamsTests extends ESTestCase {
         }
 
         assertEquals(expectedSize, out.size());
-        assertArrayEquals(expectedData, out.bytes().toBytes());
+        assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes()));
 
         out.close();
     }
@@ -219,7 +220,7 @@ public class BytesStreamsTests extends ESTestCase {
         }
 
         assertEquals(expectedSize, out.size());
-        assertArrayEquals(expectedData, out.bytes().toBytes());
+        assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes()));
 
         out.close();
     }
@@ -235,7 +236,7 @@ public class BytesStreamsTests extends ESTestCase {
         out.seek(position += BigArrays.BYTE_PAGE_SIZE + 10);
         out.seek(position += BigArrays.BYTE_PAGE_SIZE * 2);
         assertEquals(position, out.position());
-        assertEquals(position, out.bytes().toBytes().length);
+        assertEquals(position, BytesReference.toBytes(out.bytes()).length);
 
         out.close();
     }
@@ -288,8 +289,8 @@ public class BytesStreamsTests extends ESTestCase {
         out.writeTimeZone(DateTimeZone.forID("CET"));
         out.writeOptionalTimeZone(DateTimeZone.getDefault());
         out.writeOptionalTimeZone(null);
-        final byte[] bytes = out.bytes().toBytes();
-        StreamInput in = StreamInput.wrap(out.bytes().toBytes());
+        final byte[] bytes = BytesReference.toBytes(out.bytes());
+        StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes()));
         assertEquals(in.available(), bytes.length);
         assertThat(in.readBoolean(), equalTo(false));
         assertThat(in.readByte(), equalTo((byte)1));
@@ -328,7 +329,7 @@ public class BytesStreamsTests extends ESTestCase {
         namedWriteableRegistry.register(BaseNamedWriteable.class, TestNamedWriteable.NAME, TestNamedWriteable::new);
         TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10));
         out.writeNamedWriteable(namedWriteableIn);
-        byte[] bytes = out.bytes().toBytes();
+        byte[] bytes = BytesReference.toBytes(out.bytes());
         StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry);
         assertEquals(in.available(), bytes.length);
         BaseNamedWriteable namedWriteableOut = in.readNamedWriteable(BaseNamedWriteable.class);
@@ -348,7 +349,7 @@ public class BytesStreamsTests extends ESTestCase {
     public void testNamedWriteableUnknownCategory() throws IOException {
         BytesStreamOutput out = new BytesStreamOutput();
         out.writeNamedWriteable(new TestNamedWriteable("test1", "test2"));
-        StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes().toBytes()), new NamedWriteableRegistry());
+        StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), new NamedWriteableRegistry());
         //no named writeable registered with given name, can write but cannot read it back
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> in.readNamedWriteable(BaseNamedWriteable.class));
         assertThat(e.getMessage(), equalTo("unknown named writeable category [" + BaseNamedWriteable.class.getName() + "]"));
@@ -368,7 +369,7 @@ public class BytesStreamsTests extends ESTestCase {
             public void writeTo(StreamOutput out) throws IOException {
             }
         });
-        StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes().toBytes()), namedWriteableRegistry);
+        StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(BytesReference.toBytes(out.bytes())), namedWriteableRegistry);
         try {
             //no named writeable registered with given name under test category, can write but cannot read it back
             in.readNamedWriteable(BaseNamedWriteable.class);
@@ -382,7 +383,7 @@ public class BytesStreamsTests extends ESTestCase {
         BytesStreamOutput out = new BytesStreamOutput();
         TestNamedWriteable testNamedWriteable = new TestNamedWriteable("test1", "test2");
         out.writeNamedWriteable(testNamedWriteable);
-        StreamInput in = StreamInput.wrap(out.bytes().toBytes());
+        StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes()));
         try {
             in.readNamedWriteable(BaseNamedWriteable.class);
             fail("Expected UnsupportedOperationException");
@@ -397,7 +398,7 @@ public class BytesStreamsTests extends ESTestCase {
         namedWriteableRegistry.register(BaseNamedWriteable.class, TestNamedWriteable.NAME, (StreamInput in) -> null);
         TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10));
         out.writeNamedWriteable(namedWriteableIn);
-        byte[] bytes = out.bytes().toBytes();
+        byte[] bytes = BytesReference.toBytes(out.bytes());
         StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry);
         assertEquals(in.available(), bytes.length);
         IOException e = expectThrows(IOException.class, () -> in.readNamedWriteable(BaseNamedWriteable.class));
@@ -407,7 +408,7 @@ public class BytesStreamsTests extends ESTestCase {
     public void testOptionalWriteableReaderReturnsNull() throws IOException {
         BytesStreamOutput out = new BytesStreamOutput();
         out.writeOptionalWriteable(new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)));
-        StreamInput in = StreamInput.wrap(out.bytes().toBytes());
+        StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes()));
         IOException e = expectThrows(IOException.class, () -> in.readOptionalWriteable((StreamInput ignored) -> null));
         assertThat(e.getMessage(), endsWith("] returned null which is not allowed and probably means it screwed up the stream."));
     }
@@ -423,7 +424,7 @@ public class BytesStreamsTests extends ESTestCase {
         });
         TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10));
         out.writeNamedWriteable(namedWriteableIn);
-        byte[] bytes = out.bytes().toBytes();
+        byte[] bytes = BytesReference.toBytes(out.bytes());
         StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry);
         assertEquals(in.available(), bytes.length);
         AssertionError e = expectThrows(AssertionError.class, () -> in.readNamedWriteable(BaseNamedWriteable.class));
@@ -442,7 +443,7 @@ public class BytesStreamsTests extends ESTestCase {
         final BytesStreamOutput out = new BytesStreamOutput();
         out.writeStreamableList(expected);
 
-        final StreamInput in = StreamInput.wrap(out.bytes().toBytes());
+        final StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes()));
 
         List<TestStreamable> loaded = in.readStreamableList(TestStreamable::new);
 
@@ -458,7 +459,7 @@ public class BytesStreamsTests extends ESTestCase {
         out.close();
     }
 
-    private static abstract class BaseNamedWriteable implements NamedWriteable {
+    private abstract static class BaseNamedWriteable implements NamedWriteable {
 
     }
 
@@ -537,7 +538,7 @@ public class BytesStreamsTests extends ESTestCase {
 
         // toByteArray() must fail
         try {
-            out.bytes().toBytes();
+            BytesReference.toBytes(out.bytes());
             fail("expected IllegalStateException: stream closed");
         } catch (IllegalStateException iex1) {
@@ -558,7 +559,7 @@ public class BytesStreamsTests extends ESTestCase {
         BytesStreamOutput out = new BytesStreamOutput();
         GeoPoint geoPoint = new GeoPoint(randomDouble(), randomDouble());
         out.writeGenericValue(geoPoint);
-        StreamInput wrap = StreamInput.wrap(out.bytes());
+        StreamInput wrap = out.bytes().streamInput();
         GeoPoint point = (GeoPoint) wrap.readGenericValue();
         assertEquals(point, geoPoint);
     }
@@ -566,7 +567,7 @@ public class BytesStreamsTests extends ESTestCase {
         BytesStreamOutput out = new BytesStreamOutput();
         GeoPoint geoPoint = new GeoPoint(randomDouble(), randomDouble());
         out.writeGeoPoint(geoPoint);
-        StreamInput wrap = StreamInput.wrap(out.bytes());
+        StreamInput wrap = out.bytes().streamInput();
         GeoPoint point = wrap.readGeoPoint();
         assertEquals(point, geoPoint);
     }
diff --git a/core/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java b/core/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java
index 72f933462e0..06d39398c8e 100644
--- a/core/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java
+++ b/core/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java
@@ -19,15 +19,13 @@
 
 package org.elasticsearch.common.io.stream;
 
-import org.elasticsearch.common.bytes.ByteBufferBytesReference;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.ByteArrayInputStream;
-import java.io.FilterInputStream;
 import java.io.IOException;
-import java.io.InputStream;
-import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -35,6 +33,8 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
+import static org.hamcrest.Matchers.equalTo;
+
 public class StreamTests extends ESTestCase {
     public void testRandomVLongSerialization() throws IOException {
         for (int i = 0; i < 1024; i++) {
@@ -61,8 +61,8 @@ public class StreamTests extends ESTestCase {
         for (Tuple<Long, byte[]> value : values) {
             BytesStreamOutput out = new BytesStreamOutput();
             out.writeZLong(value.v1());
-            assertArrayEquals(Long.toString(value.v1()), value.v2(), out.bytes().toBytes());
-            ByteBufferBytesReference bytes = new ByteBufferBytesReference(ByteBuffer.wrap(value.v2()));
+            assertArrayEquals(Long.toString(value.v1()), value.v2(), BytesReference.toBytes(out.bytes()));
+            BytesReference bytes = new BytesArray(value.v2());
             assertEquals(Arrays.toString(value.v2()), (long)value.v1(), bytes.streamInput().readZLong());
         }
     }
@@ -121,4 +121,62 @@ public class StreamTests extends ESTestCase {
         streamInput.readBytes(new byte[bytesToRead], 0, bytesToRead);
         assertEquals(streamInput.available(), length - bytesToRead);
     }
+
+    public void testWritableArrays() throws IOException {
+
+        final String[] strings = generateRandomStringArray(10, 10, false, true);
+        WriteableString[] sourceArray = Arrays.stream(strings).map(WriteableString::new).toArray(WriteableString[]::new);
+        WriteableString[] targetArray;
+        BytesStreamOutput out = new BytesStreamOutput();
+
+        if (randomBoolean()) {
+            if (randomBoolean()) {
+                sourceArray = null;
+            }
+            out.writeOptionalArray(sourceArray);
+            targetArray = out.bytes().streamInput().readOptionalArray(WriteableString::new, WriteableString[]::new);
+        } else {
+            out.writeArray(sourceArray);
+            targetArray = out.bytes().streamInput().readArray(WriteableString::new, WriteableString[]::new);
+        }
+
+        assertThat(targetArray, equalTo(sourceArray));
+    }
+
+    static final class WriteableString implements Writeable {
+        final String string;
+
+        public WriteableString(String string) {
+            this.string = string;
+        }
+
+        public WriteableString(StreamInput in) throws IOException {
+            this(in.readString());
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) {
+                return true;
+            }
+            if (o == null || getClass() != o.getClass()) {
+                return false;
+            }
+
+            WriteableString that = (WriteableString) o;
+
+            return string.equals(that.string);
+
+        }
+
+        @Override
+        public int hashCode() {
+            return string.hashCode();
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeString(string);
+        }
+    }
 }
diff --git a/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java
index 224a3cf6c88..33a0b855a7a 100644
--- a/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java
+++ b/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java
@@ -19,6 +19,7 @@
 package org.elasticsearch.common.lucene;
 
 import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.core.KeywordAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Field.Store;
@@ -27,6 +28,8 @@ import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.NoDeletionPolicy;
 import org.apache.lucene.index.NoMergePolicy;
 import org.apache.lucene.index.RandomIndexWriter;
@@ -35,9 +38,11 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.Weight;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MMapDirectory;
 import org.apache.lucene.store.MockDirectoryWrapper;
+import org.apache.lucene.util.Bits;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
@@ -49,9 +54,6 @@ import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-/**
- *
- */
 public class LuceneTests extends ESTestCase {
     public void testWaitForIndex() throws Exception {
         final MockDirectoryWrapper dir = newMockDirectory();
@@ -355,6 +357,45 @@ public class LuceneTests extends ESTestCase {
         dir.close();
     }
 
+    public void testAsSequentialAccessBits() throws Exception {
+        Directory dir = newDirectory();
+        IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new KeywordAnalyzer()));
+
+        Document doc = new Document();
+        doc.add(new StringField("foo", "bar", Store.NO));
+        w.addDocument(doc);
+
+        doc = new Document();
+        w.addDocument(doc);
+
+        doc = new Document();
+        doc.add(new StringField("foo", "bar", Store.NO));
+        w.addDocument(doc);
+
+
+        try (DirectoryReader reader = DirectoryReader.open(w)) {
+            IndexSearcher searcher = newSearcher(reader);
+            Weight termWeight = new TermQuery(new Term("foo", "bar")).createWeight(searcher, false);
+            assertEquals(1, reader.leaves().size());
+            LeafReaderContext leafReaderContext = searcher.getIndexReader().leaves().get(0);
+            Bits bits = Lucene.asSequentialAccessBits(leafReaderContext.reader().maxDoc(), termWeight.scorer(leafReaderContext));
+
+            expectThrows(IndexOutOfBoundsException.class, () -> bits.get(-1));
+            expectThrows(IndexOutOfBoundsException.class, () -> bits.get(leafReaderContext.reader().maxDoc()));
+            assertTrue(bits.get(0));
+            assertTrue(bits.get(0));
+            assertFalse(bits.get(1));
+            assertFalse(bits.get(1));
+            expectThrows(IllegalArgumentException.class, () -> bits.get(0));
+            assertTrue(bits.get(2));
+            assertTrue(bits.get(2));
+            expectThrows(IllegalArgumentException.class, () -> bits.get(1));
+        }
+
+        w.close();
+        dir.close();
+    }
+
     /**
      * Test that the "unmap hack" is detected as supported by lucene.
     * This works around the following bug: https://bugs.openjdk.java.net/browse/JDK-4724038
diff --git a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java
index d54edbcaa9d..131636e2011 100644
--- a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java
@@ -20,7 +20,7 @@
 package org.elasticsearch.common.network;
 
 import org.elasticsearch.action.support.replication.ReplicationTask;
-import org.elasticsearch.client.Client;
+import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.Table;
 import org.elasticsearch.common.component.AbstractLifecycleComponent;
 import org.elasticsearch.common.inject.ModuleTestCase;
@@ -36,11 +36,8 @@ import org.elasticsearch.http.HttpServerTransport;
 import org.elasticsearch.http.HttpStats;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.RestChannel;
-import org.elasticsearch.rest.RestHandler;
 import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.rest.action.cat.AbstractCatAction;
-import org.elasticsearch.rest.action.cat.RestNodesAction;
-import org.elasticsearch.rest.action.main.RestMainAction;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.test.transport.AssertingLocalTransport;
 import org.elasticsearch.transport.Transport;
@@ -62,7 +59,7 @@ public class NetworkModuleTests extends ModuleTestCase {
         }
     }
 
-    static class FakeHttpTransport extends AbstractLifecycleComponent<HttpServerTransport> implements HttpServerTransport {
+    static class FakeHttpTransport extends AbstractLifecycleComponent implements HttpServerTransport {
         public FakeHttpTransport() {
             super(null);
         }
@@ -90,18 +87,18 @@ public class NetworkModuleTests extends ModuleTestCase {
     static class FakeRestHandler extends BaseRestHandler {
         public FakeRestHandler() {
-            super(null, null);
+            super(null);
         }
         @Override
-        protected void handleRequest(RestRequest request, RestChannel channel, Client client) throws Exception {}
+        public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception {}
     }
 
     static class FakeCatRestHandler extends AbstractCatAction {
         public FakeCatRestHandler() {
-            super(null, null, null);
+            super(null);
         }
         @Override
-        protected void doRequest(RestRequest request, RestChannel channel, Client client) {}
+        protected void doRequest(RestRequest request, RestChannel channel, NodeClient client) {}
         @Override
         protected void documentation(StringBuilder sb) {}
         @Override
@@ -163,32 +160,6 @@ public class NetworkModuleTests extends ModuleTestCase {
         assertFalse(module.isTransportClient());
     }
 
-    public void testRegisterRestHandler() {
-        Settings settings = Settings.EMPTY;
-        NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, new NamedWriteableRegistry());
-        module.registerRestHandler(FakeRestHandler.class);
-        // also check a builtin is bound
-        assertSetMultiBinding(module, RestHandler.class, FakeRestHandler.class, RestMainAction.class);
-
-        // check registration not allowed for transport only
-        module = new NetworkModule(new NetworkService(settings), settings, true, new NamedWriteableRegistry());
-        try {
-            module.registerRestHandler(FakeRestHandler.class);
-            fail();
-        } catch (IllegalArgumentException e) {
-            assertTrue(e.getMessage().contains("Cannot register rest handler"));
-            assertTrue(e.getMessage().contains("for transport client"));
-        }
-    }
-
-    public void testRegisterCatRestHandler() {
-        Settings settings = Settings.EMPTY;
-        NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, new NamedWriteableRegistry());
-        module.registerRestHandler(FakeCatRestHandler.class);
-        // also check a builtin is bound
-        assertSetMultiBinding(module, AbstractCatAction.class, FakeCatRestHandler.class, RestNodesAction.class);
-    }
-
     public void testRegisterTaskStatus() {
         NamedWriteableRegistry registry = new NamedWriteableRegistry();
         Settings settings = Settings.EMPTY;
diff --git a/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java b/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java
index e82d37a5cf5..f9e5f6e3fbb 100644
--- a/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java
+++ b/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.common.rounding;
 
+import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.rounding.TimeZoneRounding.TimeIntervalRounding;
 import org.elasticsearch.common.rounding.TimeZoneRounding.TimeUnitRounding;
 import org.elasticsearch.common.unit.TimeValue;
@@ -31,10 +32,13 @@ import org.joda.time.DateTimeConstants;
 import org.joda.time.DateTimeZone;
 import org.joda.time.format.ISODateTimeFormat;
 
+import java.util.ArrayList;
+import java.util.List;
 import java.util.concurrent.TimeUnit;
 
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.hamcrest.Matchers.lessThan;
 import static org.hamcrest.Matchers.lessThanOrEqualTo;
 
@@ -328,29 +332,70 @@ public class TimeZoneRoundingTests extends ESTestCase {
             long interval = unit.toMillis(randomIntBetween(1, 365));
             DateTimeZone tz = randomDateTimeZone();
             TimeZoneRounding rounding = new TimeZoneRounding.TimeIntervalRounding(interval, tz);
-            long date = Math.abs(randomLong() % (2 * (long) 10e11)); // 1970-01-01T00:00:00Z - 2033-05-18T05:33:20.000+02:00
-            try {
-                final long roundedDate = rounding.round(date);
-                final long nextRoundingValue = rounding.nextRoundingValue(roundedDate);
-                assertThat("Rounding should be idempotent", roundedDate, equalTo(rounding.round(roundedDate)));
-                assertThat("Rounded value smaller or equal than unrounded", roundedDate, lessThanOrEqualTo(date));
-                assertThat("Values smaller than rounded value should round further down", rounding.round(roundedDate - 1),
-                        lessThan(roundedDate));
+            long mainDate = Math.abs(randomLong() % (2 * (long) 10e11)); // 1970-01-01T00:00:00Z - 2033-05-18T05:33:20.000+02:00
+            if (randomBoolean()) {
+                mainDate = nastyDate(mainDate, tz, interval);
+            }
+            // check two intervals around date
+            long previousRoundedValue = Long.MIN_VALUE;
+            for (long date = mainDate - 2 * interval; date < mainDate + 2 * interval; date += interval / 2) {
+                try {
+                    final long roundedDate = rounding.round(date);
+                    final long nextRoundingValue = rounding.nextRoundingValue(roundedDate);
+                    assertThat("Rounding should be idempotent", roundedDate, equalTo(rounding.round(roundedDate)));
+                    assertThat("Rounded value smaller or equal than unrounded", roundedDate, lessThanOrEqualTo(date));
+                    assertThat("Values smaller than rounded value should round further down", rounding.round(roundedDate - 1),
+                            lessThan(roundedDate));
+                    assertThat("Rounding should be >= previous rounding value", roundedDate, greaterThanOrEqualTo(previousRoundedValue));
 
-                if (tz.isFixed()) {
-                    assertThat("NextRounding value should be greater than date", nextRoundingValue, greaterThan(roundedDate));
-                    assertThat("NextRounding value should be interval from rounded value", nextRoundingValue - roundedDate,
-                            equalTo(interval));
-                    assertThat("NextRounding value should be a rounded date", nextRoundingValue,
-                            equalTo(rounding.round(nextRoundingValue)));
+                    if (tz.isFixed()) {
+                        assertThat("NextRounding value should be greater than date", nextRoundingValue, greaterThan(roundedDate));
+                        assertThat("NextRounding value should be interval from rounded value", nextRoundingValue - roundedDate,
+                                equalTo(interval));
+                        assertThat("NextRounding value should be a rounded date", nextRoundingValue,
+                                equalTo(rounding.round(nextRoundingValue)));
+                    }
+                    previousRoundedValue = roundedDate;
+                } catch (AssertionError e) {
+                    logger.error("Rounding error at {}, timezone {}, interval: {},", new DateTime(date, tz), tz, interval);
+                    throw e;
                 }
-            } catch (AssertionError e) {
-                logger.error("Rounding error at {}, timezone {}, interval: {},", new DateTime(date, tz), tz, interval);
-                throw e;
             }
         }
     }
 
+    /**
+     * Test that rounded values are always greater or equal to last rounded value if date is increasing.
+ * The example covers an interval around 2011-10-30T02:10:00+01:00, time zone CET, interval: 2700000ms + */ + public void testIntervalRoundingMonotonic_CET() { + long interval = TimeUnit.MINUTES.toMillis(45); + DateTimeZone tz = DateTimeZone.forID("CET"); + TimeZoneRounding rounding = new TimeZoneRounding.TimeIntervalRounding(interval, tz); + List<Tuple<String, String>> expectedDates = new ArrayList<>(); + // first date is the date to be rounded, second the expected result + expectedDates.add(new Tuple<>("2011-10-30T01:40:00.000+02:00", "2011-10-30T01:30:00.000+02:00")); + expectedDates.add(new Tuple<>("2011-10-30T02:02:30.000+02:00", "2011-10-30T01:30:00.000+02:00")); + expectedDates.add(new Tuple<>("2011-10-30T02:25:00.000+02:00", "2011-10-30T02:15:00.000+02:00")); + expectedDates.add(new Tuple<>("2011-10-30T02:47:30.000+02:00", "2011-10-30T02:15:00.000+02:00")); + expectedDates.add(new Tuple<>("2011-10-30T02:10:00.000+01:00", "2011-10-30T02:15:00.000+02:00")); + expectedDates.add(new Tuple<>("2011-10-30T02:32:30.000+01:00", "2011-10-30T02:15:00.000+01:00")); + expectedDates.add(new Tuple<>("2011-10-30T02:55:00.000+01:00", "2011-10-30T02:15:00.000+01:00")); + expectedDates.add(new Tuple<>("2011-10-30T03:17:30.000+01:00", "2011-10-30T03:00:00.000+01:00")); + + long previousDate = Long.MIN_VALUE; + for (Tuple<String, String> dates : expectedDates) { + final long roundedDate = rounding.round(time(dates.v1())); + assertThat(roundedDate, isDate(time(dates.v2()), tz)); + assertThat(roundedDate, greaterThanOrEqualTo(previousDate)); + previousDate = roundedDate; + } + // here's what this means for interval widths + assertEquals(TimeUnit.MINUTES.toMillis(45), time("2011-10-30T02:15:00.000+02:00") - time("2011-10-30T01:30:00.000+02:00")); + assertEquals(TimeUnit.MINUTES.toMillis(60), time("2011-10-30T02:15:00.000+01:00") - time("2011-10-30T02:15:00.000+02:00")); + assertEquals(TimeUnit.MINUTES.toMillis(45), time("2011-10-30T03:00:00.000+01:00") - time("2011-10-30T02:15:00.000+01:00")); + } + /** * special test for DST switch from #9491 */ diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 664f8cb96ab..dee20d6b32e 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -98,7 +98,7 @@ public class ScopedSettingsTests extends ESTestCase { assertEquals(0, aC.get()); assertEquals(0, bC.get()); try { - service.dryRun(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", -15).build()); + service.validateUpdate(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", -15).build()); fail("invalid value"); } catch (IllegalArgumentException ex) { assertEquals("illegal value can't update [foo.bar.baz] from [1] to [-15]", ex.getMessage()); @@ -108,7 +108,7 @@ public class ScopedSettingsTests extends ESTestCase { assertEquals(0, consumer2.get()); assertEquals(0, aC.get()); assertEquals(0, bC.get()); - service.dryRun(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", 15).build()); + service.validateUpdate(Settings.builder().put("foo.bar", 2).put("foo.bar.baz", 15).build()); assertEquals(0, consumer.get()); assertEquals(0, consumer2.get()); assertEquals(0, aC.get()); diff --git a/core/src/test/java/org/elasticsearch/common/transport/BoundTransportAddressTests.java b/core/src/test/java/org/elasticsearch/common/transport/BoundTransportAddressTests.java index 45db5a33d21..1a3fa4db137 100644 --- 
a/core/src/test/java/org/elasticsearch/common/transport/BoundTransportAddressTests.java +++ b/core/src/test/java/org/elasticsearch/common/transport/BoundTransportAddressTests.java @@ -51,7 +51,7 @@ public class BoundTransportAddressTests extends ESTestCase { // serialize BytesStreamOutput streamOutput = new BytesStreamOutput(); transportAddress.writeTo(streamOutput); - StreamInput in = ByteBufferStreamInput.wrap(streamOutput.bytes()); + StreamInput in = streamOutput.bytes().streamInput(); BoundTransportAddress serializedAddress; if (randomBoolean()) { diff --git a/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java b/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java index f9a4d3f22af..7c5463baed2 100644 --- a/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java +++ b/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java @@ -82,7 +82,7 @@ public class DistanceUnitTests extends ESTestCase { for (DistanceUnit unit : DistanceUnit.values()) { try (BytesStreamOutput out = new BytesStreamOutput()) { unit.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat("Roundtrip serialisation failed.", DistanceUnit.readFromStream(in), equalTo(unit)); } } diff --git a/core/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java b/core/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java index 2b5a7c00e5d..3f6f1848fd8 100644 --- a/core/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java +++ b/core/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java @@ -145,7 +145,7 @@ public class FuzzinessTests extends ESTestCase { private static Fuzziness doSerializeRoundtrip(Fuzziness in) throws IOException { BytesStreamOutput output = new BytesStreamOutput(); in.writeTo(output); - StreamInput streamInput = StreamInput.wrap(output.bytes()); + StreamInput streamInput = output.bytes().streamInput(); return new Fuzziness(streamInput); } } diff --git a/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java b/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java index cc36625e68f..003d78ce42e 100644 --- a/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java +++ b/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java @@ -30,9 +30,12 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.common.unit.TimeValue.timeValueNanos; import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.object.HasToString.hasToString; public class TimeValueTests extends ESTestCase { @@ -85,9 +88,6 @@ public class TimeValueTests extends ESTestCase { assertEquals(new TimeValue(10, TimeUnit.SECONDS), TimeValue.parseTimeValue("10S", null, "test")); - assertEquals(new TimeValue(100, TimeUnit.MILLISECONDS), - TimeValue.parseTimeValue("0.1s", null, "test")); - assertEquals(new TimeValue(10, TimeUnit.MINUTES), TimeValue.parseTimeValue("10 m", null, "test")); assertEquals(new TimeValue(10, TimeUnit.MINUTES), @@ -115,14 +115,45 @@ public class TimeValueTests extends ESTestCase { assertEquals(new TimeValue(10, TimeUnit.DAYS), TimeValue.parseTimeValue("10D", null, "test")); - assertEquals(new TimeValue(70, 
TimeUnit.DAYS), - TimeValue.parseTimeValue("10 w", null, "test")); - assertEquals(new TimeValue(70, TimeUnit.DAYS), - TimeValue.parseTimeValue("10w", null, "test")); - assertEquals(new TimeValue(70, TimeUnit.DAYS), - TimeValue.parseTimeValue("10 W", null, "test")); - assertEquals(new TimeValue(70, TimeUnit.DAYS), - TimeValue.parseTimeValue("10W", null, "test")); + final int length = randomIntBetween(0, 8); + final String zeros = new String(new char[length]).replace('\0', '0'); + assertTrue(TimeValue.parseTimeValue("-" + zeros + "1", null, "test") == TimeValue.MINUS_ONE); + assertTrue(TimeValue.parseTimeValue(zeros + "0", null, "test") == TimeValue.ZERO); + } + + public void testRoundTrip() { + final String s = randomTimeValue(); + assertThat(TimeValue.parseTimeValue(s, null, "test").getStringRep(), equalTo(s)); + final TimeValue t = new TimeValue(randomIntBetween(1, 128), randomFrom(TimeUnit.values())); + assertThat(TimeValue.parseTimeValue(t.getStringRep(), null, "test"), equalTo(t)); + } + + private static final String FRACTIONAL_TIME_VALUES_ARE_NOT_SUPPORTED = "fractional time values are not supported"; + + public void testNonFractionalTimeValues() { + final String s = randomAsciiOfLength(10) + randomTimeUnit(); + final ElasticsearchParseException e = + expectThrows(ElasticsearchParseException.class, () -> TimeValue.parseTimeValue(s, null, "test")); + assertThat(e, hasToString(containsString("failed to parse [" + s + "]"))); + assertThat(e, not(hasToString(containsString(FRACTIONAL_TIME_VALUES_ARE_NOT_SUPPORTED)))); + assertThat(e.getCause(), instanceOf(NumberFormatException.class)); + } + + public void testFractionalTimeValues() { + double value; + do { + value = randomDouble(); + } while (value == 0); + final String s = Double.toString(randomIntBetween(0, 128) + value) + randomTimeUnit(); + final ElasticsearchParseException e = + expectThrows(ElasticsearchParseException.class, () -> TimeValue.parseTimeValue(s, null, "test")); + assertThat(e, hasToString(containsString("failed to parse [" + s + "]"))); + assertThat(e, hasToString(containsString(FRACTIONAL_TIME_VALUES_ARE_NOT_SUPPORTED))); + assertThat(e.getCause(), instanceOf(NumberFormatException.class)); + } + + private String randomTimeUnit() { + return randomFrom("nanos", "micros", "ms", "s", "m", "h", "d"); } private void assertEqualityAfterSerialize(TimeValue value, int expectedSize) throws IOException { @@ -130,17 +161,24 @@ public class TimeValueTests extends ESTestCase { value.writeTo(out); assertEquals(expectedSize, out.size()); - StreamInput in = StreamInput.wrap(out.bytes()); + StreamInput in = out.bytes().streamInput(); TimeValue inValue = new TimeValue(in); assertThat(inValue, equalTo(value)); + assertThat(inValue.duration(), equalTo(value.duration())); + assertThat(inValue.timeUnit(), equalTo(value.timeUnit())); } public void testSerialize() throws Exception { - assertEqualityAfterSerialize(new TimeValue(100, TimeUnit.DAYS), 8); - assertEqualityAfterSerialize(timeValueNanos(-1), 1); - assertEqualityAfterSerialize(timeValueNanos(1), 1); - assertEqualityAfterSerialize(timeValueSeconds(30), 6); + assertEqualityAfterSerialize(new TimeValue(100, TimeUnit.DAYS), 3); + assertEqualityAfterSerialize(timeValueNanos(-1), 2); + assertEqualityAfterSerialize(timeValueNanos(1), 2); + assertEqualityAfterSerialize(timeValueSeconds(30), 2); + + final TimeValue timeValue = new TimeValue(randomIntBetween(0, 1024), randomFrom(TimeUnit.values())); + BytesStreamOutput out = new BytesStreamOutput(); + out.writeZLong(timeValue.duration()); + 
assertEqualityAfterSerialize(timeValue, 1 + out.bytes().length()); } public void testFailOnUnknownUnits() { @@ -148,7 +186,7 @@ public class TimeValueTests extends ESTestCase { TimeValue.parseTimeValue("23tw", null, "test"); fail("Expected ElasticsearchParseException"); } catch (ElasticsearchParseException e) { - assertThat(e.getMessage(), containsString("Failed to parse")); + assertThat(e.getMessage(), containsString("failed to parse")); } } @@ -157,7 +195,7 @@ public class TimeValueTests extends ESTestCase { TimeValue.parseTimeValue("42", null, "test"); fail("Expected ElasticsearchParseException"); } catch (ElasticsearchParseException e) { - assertThat(e.getMessage(), containsString("Failed to parse")); + assertThat(e.getMessage(), containsString("failed to parse")); } } @@ -166,7 +204,7 @@ public class TimeValueTests extends ESTestCase { TimeValue.parseTimeValue("42ms.", null, "test"); fail("Expected ElasticsearchParseException"); } catch (ElasticsearchParseException e) { - assertThat(e.getMessage(), containsString("Failed to parse")); + assertThat(e.getMessage(), containsString("failed to parse")); } } diff --git a/core/src/test/java/org/elasticsearch/common/util/CancellableThreadsTests.java b/core/src/test/java/org/elasticsearch/common/util/CancellableThreadsTests.java index a89cb48c37a..729c431d2b2 100644 --- a/core/src/test/java/org/elasticsearch/common/util/CancellableThreadsTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/CancellableThreadsTests.java @@ -132,7 +132,7 @@ public class CancellableThreadsTests extends ESTestCase { public void testCancellableThreads() throws InterruptedException { Thread[] threads = new Thread[randomIntBetween(3, 10)]; final TestPlan[] plans = new TestPlan[threads.length]; - final Throwable[] throwables = new Throwable[threads.length]; + final Exception[] exceptions = new Exception[threads.length]; final boolean[] interrupted = new boolean[threads.length]; final CancellableThreads cancellableThreads = new CancellableThreads(); final CountDownLatch readyForCancel = new CountDownLatch(threads.length); @@ -153,8 +153,8 @@ public class CancellableThreadsTests extends ESTestCase { } else { cancellableThreads.execute(new TestRunnable(plan, readyForCancel)); } - } catch (Throwable t) { - throwables[plan.id] = t; + } catch (Exception e) { + exceptions[plan.id] = e; } if (plan.exceptBeforeCancel || plan.exitBeforeCancel) { // we have to mark we're ready now (actually done). @@ -176,19 +176,19 @@ public class CancellableThreadsTests extends ESTestCase { TestPlan plan = plans[i]; final Class exceptionClass = plan.ioException ? IOCustomException.class : CustomException.class; if (plan.exceptBeforeCancel) { - assertThat(throwables[i], Matchers.instanceOf(exceptionClass)); + assertThat(exceptions[i], Matchers.instanceOf(exceptionClass)); } else if (plan.exitBeforeCancel) { - assertNull(throwables[i]); + assertNull(exceptions[i]); } else { // in all other cases, we expect a cancellation exception. 
- assertThat(throwables[i], Matchers.instanceOf(CancellableThreads.ExecutionCancelledException.class)); + assertThat(exceptions[i], Matchers.instanceOf(CancellableThreads.ExecutionCancelledException.class)); if (plan.exceptAfterCancel) { - assertThat(throwables[i].getSuppressed(), + assertThat(exceptions[i].getSuppressed(), Matchers.arrayContaining( Matchers.instanceOf(exceptionClass) )); } else { - assertThat(throwables[i].getSuppressed(), Matchers.emptyArray()); + assertThat(exceptions[i].getSuppressed(), Matchers.emptyArray()); } } assertThat(interrupted[plan.id], Matchers.equalTo(plan.presetInterrupt)); diff --git a/core/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java b/core/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java index 26d6af1cd5f..5302ba8d55c 100644 --- a/core/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java @@ -67,7 +67,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { public void testUpgradeCustomDataPath() throws IOException { Path customPath = createTempDir(); final Settings nodeSettings = Settings.builder() - .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()) + .put(NodeEnvironment.ADD_NODE_LOCK_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()) .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), customPath.toAbsolutePath().toString()).build(); try (NodeEnvironment nodeEnv = newNodeEnvironment(nodeSettings)) { final Index index = new Index(randomAsciiOfLength(10), UUIDs.randomBase64UUID()); @@ -96,7 +96,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { public void testPartialUpgradeCustomDataPath() throws IOException { Path customPath = createTempDir(); final Settings nodeSettings = Settings.builder() - .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()) + .put(NodeEnvironment.ADD_NODE_LOCK_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()) .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), customPath.toAbsolutePath().toString()).build(); try (NodeEnvironment nodeEnv = newNodeEnvironment(nodeSettings)) { final Index index = new Index(randomAsciiOfLength(10), UUIDs.randomBase64UUID()); @@ -136,7 +136,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { public void testUpgrade() throws IOException { final Settings nodeSettings = Settings.builder() - .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()).build(); + .put(NodeEnvironment.ADD_NODE_LOCK_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()).build(); try (NodeEnvironment nodeEnv = newNodeEnvironment(nodeSettings)) { final Index index = new Index(randomAsciiOfLength(10), UUIDs.randomBase64UUID()); Settings settings = Settings.builder() @@ -159,7 +159,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { public void testUpgradeIndices() throws IOException { final Settings nodeSettings = Settings.builder() - .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()).build(); + .put(NodeEnvironment.ADD_NODE_LOCK_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()).build(); try (NodeEnvironment nodeEnv = newNodeEnvironment(nodeSettings)) { Map<IndexSettings, Tuple<Integer, Integer>> indexSettingsMap = new HashMap<>(); for (int i = 0; i < randomIntBetween(2, 5); i++) { @@ -256,7 +256,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { .numberOfReplicas(0) .build(); try (NodeEnvironment nodeEnvironment = newNodeEnvironment()) { - IndexMetaData.FORMAT.write(indexState, 1,
nodeEnvironment.indexPaths(index)); + IndexMetaData.FORMAT.write(indexState, nodeEnvironment.indexPaths(index)); assertFalse(IndexFolderUpgrader.needsUpgrade(index, index.getUUID())); } } @@ -305,7 +305,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { for (int i = 0; i < nodePaths.length; i++) { oldIndexPaths[i] = nodePaths[i].indicesPath.resolve(indexSettings.getIndex().getName()); } - IndexMetaData.FORMAT.write(indexSettings.getIndexMetaData(), 1, oldIndexPaths); + IndexMetaData.FORMAT.write(indexSettings.getIndexMetaData(), oldIndexPaths); for (int id = 0; id < indexSettings.getNumberOfShards(); id++) { Path oldIndexPath = randomFrom(oldIndexPaths); ShardId shardId = new ShardId(indexSettings.getIndex(), id); @@ -316,7 +316,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { writeShard(shardId, oldIndexPath, numIdxFiles, numTranslogFiles); } ShardStateMetaData state = new ShardStateMetaData(true, indexSettings.getUUID(), AllocationId.newInitializing()); - ShardStateMetaData.FORMAT.write(state, 1, oldIndexPath.resolve(String.valueOf(shardId.getId()))); + ShardStateMetaData.FORMAT.write(state, oldIndexPath.resolve(String.valueOf(shardId.getId()))); } } diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/AbstractLifecycleRunnableTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/AbstractLifecycleRunnableTests.java index 8d4efab8fa1..02adb783197 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/AbstractLifecycleRunnableTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/AbstractLifecycleRunnableTests.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.common.util.concurrent; +import org.elasticsearch.common.SuppressLoggerChecks; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.test.ESTestCase; @@ -47,7 +48,7 @@ public class AbstractLifecycleRunnableTests extends ESTestCase { AbstractLifecycleRunnable runnable = new AbstractLifecycleRunnable(lifecycle, logger) { @Override - public void onFailure(Throwable t) { + public void onFailure(Exception e) { fail("It should not fail"); } @@ -67,6 +68,7 @@ public class AbstractLifecycleRunnableTests extends ESTestCase { inOrder.verifyNoMoreInteractions(); } + @SuppressLoggerChecks(reason = "mock usage") public void testDoRunDoesNotRunWhenStoppedOrClosed() throws Exception { Callable runCallable = mock(Callable.class); @@ -75,7 +77,7 @@ public class AbstractLifecycleRunnableTests extends ESTestCase { AbstractLifecycleRunnable runnable = new AbstractLifecycleRunnable(lifecycle, logger) { @Override - public void onFailure(Throwable t) { + public void onFailure(Exception e) { fail("It should not fail"); } @@ -104,7 +106,7 @@ public class AbstractLifecycleRunnableTests extends ESTestCase { AbstractLifecycleRunnable runnable = new AbstractLifecycleRunnable(lifecycle, logger) { @Override - public void onFailure(Throwable t) { + public void onFailure(Exception e) { fail("It should not fail"); } @@ -143,7 +145,7 @@ public class AbstractLifecycleRunnableTests extends ESTestCase { AbstractLifecycleRunnable runnable = new AbstractLifecycleRunnable(lifecycle, logger) { @Override - public void onFailure(Throwable t) { + public void onFailure(Exception e) { fail("It should not fail"); } diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/AbstractRunnableTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/AbstractRunnableTests.java 
index 54491aade6f..2373b30e1b2 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/AbstractRunnableTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/AbstractRunnableTests.java @@ -37,8 +37,8 @@ public class AbstractRunnableTests extends ESTestCase { AbstractRunnable runnable = new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - fail("It should not fail"); + public void onFailure(Exception e) { + fail(e.toString()); } @Override @@ -57,8 +57,8 @@ public class AbstractRunnableTests extends ESTestCase { AbstractRunnable runnable = new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - assertSame(exception, t); + public void onFailure(Exception e) { + assertSame(exception, e); } @Override @@ -76,8 +76,8 @@ public class AbstractRunnableTests extends ESTestCase { AbstractRunnable runnable = new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - fail("It should not fail"); + public void onFailure(Exception e) { + fail(e.toString()); } @Override @@ -91,7 +91,7 @@ public class AbstractRunnableTests extends ESTestCase { afterCallable.call(); } catch (Exception e) { - fail("Unexpected for mock."); + fail(e.toString()); } } }; @@ -111,8 +111,8 @@ public class AbstractRunnableTests extends ESTestCase { AbstractRunnable runnable = new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - assertSame(exception, t); + public void onFailure(Exception e) { + assertSame(exception, e); } @Override @@ -126,7 +126,7 @@ public class AbstractRunnableTests extends ESTestCase { afterCallable.call(); } catch (Exception e) { - fail("Unexpected for mock."); + fail(e.toString()); } } }; @@ -142,14 +142,15 @@ public class AbstractRunnableTests extends ESTestCase { AbstractRunnable runnable = new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - assertSame(exception, t); + public void onFailure(Exception e) { + assertSame(exception, e); try { failureCallable.call(); } - catch (Exception e) { - fail("Unexpected for mock."); + catch (Exception inner) { + inner.addSuppressed(e); + fail(inner.toString()); } } @@ -165,8 +166,8 @@ public class AbstractRunnableTests extends ESTestCase { public void testIsForceExecutuonDefaultsFalse() { AbstractRunnable runnable = new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - fail("Not tested"); + public void onFailure(Exception e) { + fail(e.toString()); } @Override diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java index 57da614e689..72db2911fc0 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java @@ -88,8 +88,8 @@ public class EsExecutorsTests extends ESTestCase { } @Override - public void onFailure(Throwable t) { - throw new AssertionError(t); + public void onFailure(Exception e) { + throw new AssertionError(e); } }); @@ -178,7 +178,7 @@ public class EsExecutorsTests extends ESTestCase { try { barrier.await(); barrier.await(); - } catch (Throwable e) { + } catch (Exception e) { barrier.reset(e); } } @@ -214,7 +214,7 @@ public class EsExecutorsTests extends ESTestCase { try { barrier.await(); barrier.await(); - } catch (Throwable e) { + } catch (Exception e) { barrier.reset(e); } } diff --git 
a/core/src/test/java/org/elasticsearch/transport/netty/KeyedLockTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/KeyedLockTests.java similarity index 99% rename from core/src/test/java/org/elasticsearch/transport/netty/KeyedLockTests.java rename to core/src/test/java/org/elasticsearch/common/util/concurrent/KeyedLockTests.java index f9451375590..36335ee78d0 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/KeyedLockTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/KeyedLockTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.transport.netty; +package org.elasticsearch.common.util.concurrent; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.util.concurrent.KeyedLock; diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/RefCountedTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/RefCountedTests.java index 9338beccb9a..c5d0ec4257e 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/RefCountedTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/RefCountedTests.java @@ -88,7 +88,7 @@ public class RefCountedTests extends ESTestCase { final MyRefCounted counted = new MyRefCounted(); Thread[] threads = new Thread[randomIntBetween(2, 5)]; final CountDownLatch latch = new CountDownLatch(1); - final CopyOnWriteArrayList<Throwable> exceptions = new CopyOnWriteArrayList<>(); + final CopyOnWriteArrayList<Exception> exceptions = new CopyOnWriteArrayList<>(); for (int i = 0; i < threads.length; i++) { threads[i] = new Thread() { @Override @@ -103,7 +103,7 @@ public class RefCountedTests extends ESTestCase { counted.decRef(); } } - } catch (Throwable e) { + } catch (Exception e) { exceptions.add(e); } } diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/SuspendableRefContainerTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/SuspendableRefContainerTests.java deleted file mode 100644 index 83db2d4a7c6..00000000000 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/SuspendableRefContainerTests.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - -package org.elasticsearch.common.util.concurrent; - -import org.elasticsearch.common.lease.Releasable; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.test.ESTestCase; - -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.nullValue; - -public class SuspendableRefContainerTests extends ESTestCase { - - public void testBasicAcquire() throws InterruptedException { - SuspendableRefContainer refContainer = new SuspendableRefContainer(); - assertThat(refContainer.activeRefs(), equalTo(0)); - - Releasable lock1 = randomLockingMethod(refContainer); - assertThat(refContainer.activeRefs(), equalTo(1)); - Releasable lock2 = randomLockingMethod(refContainer); - assertThat(refContainer.activeRefs(), equalTo(2)); - lock1.close(); - assertThat(refContainer.activeRefs(), equalTo(1)); - lock1.close(); // check idempotence - assertThat(refContainer.activeRefs(), equalTo(1)); - lock2.close(); - assertThat(refContainer.activeRefs(), equalTo(0)); - } - - public void testAcquisitionBlockingBlocksNewAcquisitions() throws InterruptedException { - SuspendableRefContainer refContainer = new SuspendableRefContainer(); - assertThat(refContainer.activeRefs(), equalTo(0)); - - try (Releasable block = refContainer.blockAcquisition()) { - assertThat(refContainer.activeRefs(), equalTo(0)); - assertThat(refContainer.tryAcquire(), nullValue()); - assertThat(refContainer.activeRefs(), equalTo(0)); - } - try (Releasable lock = refContainer.tryAcquire()) { - assertThat(refContainer.activeRefs(), equalTo(1)); - } - - // same with blocking acquire - AtomicBoolean acquired = new AtomicBoolean(); - Thread t = new Thread(() -> { - try (Releasable lock = randomBoolean() ? 
refContainer.acquire() : refContainer.acquireUninterruptibly()) { - acquired.set(true); - assertThat(refContainer.activeRefs(), equalTo(1)); - } catch (InterruptedException e) { - fail("Interrupted"); - } - }); - try (Releasable block = refContainer.blockAcquisition()) { - assertThat(refContainer.activeRefs(), equalTo(0)); - t.start(); - // check that blocking acquire really blocks - assertThat(acquired.get(), equalTo(false)); - assertThat(refContainer.activeRefs(), equalTo(0)); - } - t.join(); - assertThat(acquired.get(), equalTo(true)); - assertThat(refContainer.activeRefs(), equalTo(0)); - } - - public void testAcquisitionBlockingWaitsOnExistingAcquisitions() throws InterruptedException { - SuspendableRefContainer refContainer = new SuspendableRefContainer(); - - AtomicBoolean acquired = new AtomicBoolean(); - Thread t = new Thread(() -> { - try (Releasable block = refContainer.blockAcquisition()) { - acquired.set(true); - assertThat(refContainer.activeRefs(), equalTo(0)); - } - }); - try (Releasable lock = randomLockingMethod(refContainer)) { - assertThat(refContainer.activeRefs(), equalTo(1)); - t.start(); - assertThat(acquired.get(), equalTo(false)); - assertThat(refContainer.activeRefs(), equalTo(1)); - } - t.join(); - assertThat(acquired.get(), equalTo(true)); - assertThat(refContainer.activeRefs(), equalTo(0)); - } - - private Releasable randomLockingMethod(SuspendableRefContainer refContainer) throws InterruptedException { - switch (randomInt(2)) { - case 0: return refContainer.tryAcquire(); - case 1: return refContainer.acquire(); - case 2: return refContainer.acquireUninterruptibly(); - } - throw new IllegalArgumentException("randomLockingMethod inconsistent"); - } -} diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java index 1a582d48f6b..e6726879513 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java @@ -154,7 +154,7 @@ public class ThreadContextTests extends ESTestCase { assertNull(threadContext.getTransient("ctx.foo")); assertEquals("1", threadContext.getHeader("default")); - threadContext.readHeaders(StreamInput.wrap(out.bytes())); + threadContext.readHeaders(out.bytes().streamInput()); assertEquals("bar", threadContext.getHeader("foo")); assertNull(threadContext.getTransient("ctx.foo")); } @@ -179,14 +179,14 @@ public class ThreadContextTests extends ESTestCase { { Settings otherSettings = Settings.builder().put("request.headers.default", "5").build(); ThreadContext otherhreadContext = new ThreadContext(otherSettings); - otherhreadContext.readHeaders(StreamInput.wrap(out.bytes())); + otherhreadContext.readHeaders(out.bytes().streamInput()); assertEquals("bar", otherhreadContext.getHeader("foo")); assertNull(otherhreadContext.getTransient("ctx.foo")); assertEquals("1", otherhreadContext.getHeader("default")); } } - + public void testSerializeInDifferentContextNoDefaults() throws IOException { BytesStreamOutput out = new BytesStreamOutput(); { @@ -202,7 +202,7 @@ public class ThreadContextTests extends ESTestCase { { Settings otherSettings = Settings.builder().put("request.headers.default", "5").build(); ThreadContext otherhreadContext = new ThreadContext(otherSettings); - otherhreadContext.readHeaders(StreamInput.wrap(out.bytes())); + otherhreadContext.readHeaders(out.bytes().streamInput()); assertEquals("bar", 
otherhreadContext.getHeader("foo")); assertNull(otherhreadContext.getTransient("ctx.foo")); @@ -294,8 +294,8 @@ public class ThreadContextTests extends ESTestCase { } return new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - throw new RuntimeException(t); + public void onFailure(Exception e) { + throw new RuntimeException(e); } @Override diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java index f3592936765..bef4a047ef5 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java @@ -68,7 +68,7 @@ public class ConstructingObjectParserTests extends ESTestCase { assertEquals(expected.b, parsed.b); assertEquals(expected.c, parsed.c); assertEquals(expected.d, parsed.d); - } catch (Throwable e) { + } catch (Exception e) { // It is convenient to decorate the error message with the json throw new Exception("Error parsing: [" + builder.string() + "]", e); } diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java index 159d8a97be4..a8d26e87ecf 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java @@ -35,7 +35,7 @@ import org.elasticsearch.test.ESTestCase; public class ObjectParserTests extends ESTestCase { - private final static ParseFieldMatcherSupplier STRICT_PARSING = () -> ParseFieldMatcher.STRICT; + private static final ParseFieldMatcherSupplier STRICT_PARSING = () -> ParseFieldMatcher.STRICT; public void testBasics() throws IOException { XContentParser parser = XContentType.JSON.xContent().createParser( diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java index 583234461b3..8319873878a 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java @@ -57,11 +57,10 @@ public class XContentFactoryTests extends ESTestCase { builder.endObject(); assertThat(XContentFactory.xContentType(builder.bytes()), equalTo(type)); - BytesArray bytesArray = builder.bytes().toBytesArray(); - assertThat(XContentFactory.xContentType(StreamInput.wrap(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length())), equalTo(type)); + assertThat(XContentFactory.xContentType(builder.bytes().streamInput()), equalTo(type)); // CBOR is binary, cannot use String - if (type != XContentType.CBOR) { + if (type != XContentType.CBOR && type != XContentType.SMILE) { assertThat(XContentFactory.xContentType(builder.string()), equalTo(type)); } } diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java index 34944e713bd..fe69fc1f05d 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java @@ -94,7 +94,7 @@ public class XContentBuilderTests extends ESTestCase { xContentBuilder.startObject(); 
xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}")); xContentBuilder.endObject(); - assertThat(xContentBuilder.bytes().toUtf8(), equalTo("{\"foo\":{\"test\":\"value\"}}")); + assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"foo\":{\"test\":\"value\"}}")); } { XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON); @@ -102,7 +102,7 @@ public class XContentBuilderTests extends ESTestCase { xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}")); xContentBuilder.rawField("foo1", new BytesArray("{\"test\":\"value\"}")); xContentBuilder.endObject(); - assertThat(xContentBuilder.bytes().toUtf8(), equalTo("{\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"}}")); + assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"}}")); } { XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON); @@ -110,7 +110,7 @@ public class XContentBuilderTests extends ESTestCase { xContentBuilder.field("test", "value"); xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}")); xContentBuilder.endObject(); - assertThat(xContentBuilder.bytes().toUtf8(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"}}")); + assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"}}")); } { XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON); @@ -119,7 +119,7 @@ public class XContentBuilderTests extends ESTestCase { xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}")); xContentBuilder.field("test1", "value1"); xContentBuilder.endObject(); - assertThat(xContentBuilder.bytes().toUtf8(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"test1\":\"value1\"}")); + assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"test1\":\"value1\"}")); } { XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON); @@ -129,7 +129,7 @@ public class XContentBuilderTests extends ESTestCase { xContentBuilder.rawField("foo1", new BytesArray("{\"test\":\"value\"}")); xContentBuilder.field("test1", "value1"); xContentBuilder.endObject(); - assertThat(xContentBuilder.bytes().toUtf8(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"},\"test1\":\"value1\"}")); + assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"},\"test1\":\"value1\"}")); } } @@ -161,15 +161,14 @@ public class XContentBuilderTests extends ESTestCase { gen.writeEndObject(); gen.close(); - byte[] data = bos.bytes().toBytes(); - String sData = new String(data, "UTF8"); + String sData = bos.bytes().utf8ToString(); assertThat(sData, equalTo("{\"name\":\"something\", source : { test : \"value\" },\"name2\":\"something2\"}")); } public void testByteConversion() throws Exception { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.startObject().field("test_name", (Byte)(byte)120).endObject(); - assertThat(builder.bytes().toUtf8(), equalTo("{\"test_name\":120}")); + assertThat(builder.bytes().utf8ToString(), equalTo("{\"test_name\":120}")); } public void testDateTypesConversion() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/cbor/JsonVsCborTests.java 
b/core/src/test/java/org/elasticsearch/common/xcontent/cbor/JsonVsCborTests.java index bf2dd442b64..efbca114aac 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/cbor/JsonVsCborTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/cbor/JsonVsCborTests.java @@ -63,7 +63,8 @@ public class JsonVsCborTests extends ESTestCase { xsonGen.close(); jsonGen.close(); - verifySameTokens(XContentFactory.xContent(XContentType.JSON).createParser(jsonOs.bytes().toBytes()), XContentFactory.xContent(XContentType.CBOR).createParser(xsonOs.bytes().toBytes())); + verifySameTokens(XContentFactory.xContent(XContentType.JSON).createParser(jsonOs.bytes()), + XContentFactory.xContent(XContentType.CBOR).createParser(xsonOs.bytes())); } private void verifySameTokens(XContentParser parser1, XContentParser parser2) throws IOException { diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java index 9e686fe78f1..63b19a63822 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java @@ -63,7 +63,8 @@ public class JsonVsSmileTests extends ESTestCase { xsonGen.close(); jsonGen.close(); - verifySameTokens(XContentFactory.xContent(XContentType.JSON).createParser(jsonOs.bytes().toBytes()), XContentFactory.xContent(XContentType.SMILE).createParser(xsonOs.bytes().toBytes())); + verifySameTokens(XContentFactory.xContent(XContentType.JSON).createParser(jsonOs.bytes()), + XContentFactory.xContent(XContentType.SMILE).createParser(xsonOs.bytes())); } private void verifySameTokens(XContentParser parser1, XContentParser parser2) throws IOException { diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractFilteringJsonGeneratorTestCase.java b/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractFilteringJsonGeneratorTestCase.java index e3d8735e05e..b8b38a543f6 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractFilteringJsonGeneratorTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractFilteringJsonGeneratorTestCase.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; -import java.io.ByteArrayInputStream; import java.io.IOException; import static org.hamcrest.CoreMatchers.is; @@ -45,7 +44,7 @@ public abstract class AbstractFilteringJsonGeneratorTestCase extends ESTestCase assertNotNull(expected); // Verify that the result is equal to the expected string - assertThat(builder.bytes().toUtf8(), is(expected.bytes().toUtf8())); + assertThat(builder.bytes().utf8ToString(), is(expected.bytes().utf8ToString())); } protected void assertBinary(XContentBuilder expected, XContentBuilder builder) { @@ -1166,15 +1165,15 @@ public abstract class AbstractFilteringJsonGeneratorTestCase extends ESTestCase // Test method: rawField(String fieldName, InputStream content) assertXContentBuilder(expectedRawField, - newXContentBuilder().startObject().field("foo", 0).rawField("raw", new ByteArrayInputStream(raw.toBytes())).endObject()); + newXContentBuilder().startObject().field("foo", 0).rawField("raw", raw.streamInput()).endObject()); assertXContentBuilder(expectedRawFieldFiltered, newXContentBuilder("f*", 
true).startObject().field("foo", 0) - .rawField("raw", new ByteArrayInputStream(raw.toBytes())).endObject()); + .rawField("raw", raw.streamInput()).endObject()); assertXContentBuilder(expectedRawFieldFiltered, newXContentBuilder("r*", false).startObject().field("foo", 0) - .rawField("raw", new ByteArrayInputStream(raw.toBytes())).endObject()); + .rawField("raw", raw.streamInput()).endObject()); assertXContentBuilder(expectedRawFieldNotFiltered, newXContentBuilder("r*", true).startObject().field("foo", 0) - .rawField("raw", new ByteArrayInputStream(raw.toBytes())).endObject()); + .rawField("raw", raw.streamInput()).endObject()); assertXContentBuilder(expectedRawFieldNotFiltered, newXContentBuilder("f*", false).startObject().field("foo", 0) - .rawField("raw", new ByteArrayInputStream(raw.toBytes())).endObject()); + .rawField("raw", raw.streamInput()).endObject()); } public void testArrays() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathGeneratorFilteringTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathGeneratorFilteringTests.java index dd2fe42eb8e..8dbefedb249 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathGeneratorFilteringTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathGeneratorFilteringTests.java @@ -142,7 +142,7 @@ public class FilterPathGeneratorFilteringTests extends ESTestCase { } } } - assertThat(os.bytes().toUtf8(), equalTo(replaceQuotes(expected))); + assertThat(os.bytes().utf8ToString(), equalTo(replaceQuotes(expected))); } } diff --git a/core/src/test/java/org/elasticsearch/deps/jackson/JacksonLocationTests.java b/core/src/test/java/org/elasticsearch/deps/jackson/JacksonLocationTests.java index 4efedd9154a..c25a0a6503b 100644 --- a/core/src/test/java/org/elasticsearch/deps/jackson/JacksonLocationTests.java +++ b/core/src/test/java/org/elasticsearch/deps/jackson/JacksonLocationTests.java @@ -56,8 +56,7 @@ public class JacksonLocationTests extends ESTestCase { gen.close(); - byte[] data = os.bytes().toBytes(); - JsonParser parser = new JsonFactory().createParser(data); + JsonParser parser = new JsonFactory().createParser(os.bytes().streamInput()); assertThat(parser.nextToken(), equalTo(JsonToken.START_OBJECT)); assertThat(parser.nextToken(), equalTo(JsonToken.FIELD_NAME)); // "index" diff --git a/core/src/test/java/org/elasticsearch/discovery/BlockingClusterStatePublishResponseHandlerTests.java b/core/src/test/java/org/elasticsearch/discovery/BlockingClusterStatePublishResponseHandlerTests.java index f6aac190c4b..4ff4c4cd035 100644 --- a/core/src/test/java/org/elasticsearch/discovery/BlockingClusterStatePublishResponseHandlerTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/BlockingClusterStatePublishResponseHandlerTests.java @@ -21,7 +21,7 @@ package org.elasticsearch.discovery; import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.test.ESTestCase; @@ -40,7 +40,7 @@ import static org.hamcrest.Matchers.not; public class BlockingClusterStatePublishResponseHandlerTests extends ESTestCase { - static private class 
PublishResponder extends AbstractRunnable { + private static class PublishResponder extends AbstractRunnable { final boolean fail; final DiscoveryNode node; @@ -58,8 +58,8 @@ public class BlockingClusterStatePublishResponseHandlerTests extends ESTestCase } @Override - public void onFailure(Throwable t) { - logger.error("unexpected error", t); + public void onFailure(Exception e) { + logger.error("unexpected error", e); } @Override @@ -77,7 +77,7 @@ public class BlockingClusterStatePublishResponseHandlerTests extends ESTestCase int nodeCount = scaledRandomIntBetween(10, 20); DiscoveryNode[] allNodes = new DiscoveryNode[nodeCount]; for (int i = 0; i < nodeCount; i++) { - DiscoveryNode node = new DiscoveryNode("node_" + i, DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode node = new DiscoveryNode("node_" + i, LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); allNodes[i] = node; } diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index 109c59d845a..a1bac928daf 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -62,6 +62,7 @@ import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.discovery.ClusterDiscoveryConfiguration; import org.elasticsearch.test.disruption.BlockClusterStateProcessing; +import org.elasticsearch.test.disruption.BridgePartition; import org.elasticsearch.test.disruption.IntermittentLongGCDisruption; import org.elasticsearch.test.disruption.LongGCDisruption; import org.elasticsearch.test.disruption.NetworkDelaysPartition; @@ -169,7 +170,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { return nodes; } - final static Settings DEFAULT_SETTINGS = Settings.builder() + static final Settings DEFAULT_SETTINGS = Settings.builder() .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1s") // for hitting simulated network failures quickly .put(FaultDetection.PING_RETRIES_SETTING.getKey(), "1") // for hitting simulated network failures quickly .put("discovery.zen.join_timeout", "10s") // still long to induce failures but to long so test won't time out @@ -371,6 +372,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { * This test isolates the master from rest of the cluster, waits for a new master to be elected, restores the partition * and verifies that all node agree on the new cluster state */ + @TestLogging("_root:DEBUG,cluster.service:TRACE,gateway:TRACE,indices.store:TRACE") public void testIsolateMasterAndVerifyClusterStateConsensus() throws Exception { final List<String> nodes = startCluster(3); @@ -446,8 +448,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { final int seconds = !(TEST_NIGHTLY && rarely()) ? 1 : 5; final String timeout = seconds + "s"; - // TODO: add node count randomizaion - final List<String> nodes = startCluster(3); + final List<String> nodes = startCluster(rarely() ? 
5 : 3); assertAcked(prepareCreate("test") .setSettings(Settings.builder() @@ -502,8 +503,8 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { } } catch (InterruptedException e) { // fine - semaphore interrupt - } catch (Throwable t) { - logger.info("unexpected exception in background thread of [{}]", t, node); + } catch (AssertionError | Exception e) { + logger.info("unexpected exception in background thread of [{}]", e, node); } } }); @@ -539,7 +540,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { logger.info("stopping disruption"); disruptionScheme.stopDisrupting(); for (String node : internalCluster().getNodeNames()) { - ensureStableCluster(3, TimeValue.timeValueMillis(disruptionScheme.expectedTimeToHeal().millis() + + ensureStableCluster(nodes.size(), TimeValue.timeValueMillis(disruptionScheme.expectedTimeToHeal().millis() + DISRUPTION_HEALING_OVERHEAD.millis()), true, node); } ensureGreen("test"); @@ -547,7 +548,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { logger.info("validating successful docs"); for (String node : nodes) { try { - logger.debug("validating through node [{}]", node); + logger.debug("validating through node [{}] ([{}] acked docs)", node, ackedDocs.size()); for (String id : ackedDocs.keySet()) { assertTrue("doc [" + id + "] indexed via node [" + ackedDocs.get(id) + "] not found", client(node).prepareGet("test", "type", id).setPreference("_local").get().isExists()); @@ -689,8 +690,8 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { - logger.warn("failure [{}]", t, source); + public void onFailure(String source, Exception e) { + logger.warn("failure [{}]", e, source); } }); @@ -959,7 +960,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { } @Override - public void onFailure(Throwable t) { + public void onFailure(Exception e) { success.set(false); latch.countDown(); assert false; @@ -1191,7 +1192,8 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { new NetworkUnresponsivePartition(random()), new NetworkDelaysPartition(random()), new NetworkDisconnectPartition(random()), - new SlowClusterStateProcessing(random()) + new SlowClusterStateProcessing(random()), + new BridgePartition(random(), randomBoolean()) ); Collections.shuffle(list, random()); setDisruptionScheme(list.get(0)); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ElectMasterServiceTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/ElectMasterServiceTests.java index 0f93e5d460c..b31b0cbaa55 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ElectMasterServiceTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ElectMasterServiceTests.java @@ -22,7 +22,7 @@ package org.elasticsearch.discovery.zen; import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.test.ESTestCase; @@ -46,7 +46,7 @@ public class ElectMasterServiceTests extends ESTestCase { if (randomBoolean()) { roles.add(DiscoveryNode.Role.MASTER); } - DiscoveryNode node = new DiscoveryNode("n_" + i, "n_" + i, DummyTransportAddress.INSTANCE, Collections.emptyMap(), + 
DiscoveryNode node = new DiscoveryNode("n_" + i, "n_" + i, LocalTransportAddress.buildUnique(), Collections.emptyMap(), roles, Version.CURRENT); nodes.add(node); } diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java index 135352343b6..cd2b4eaf2e4 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.RoutingService; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation; @@ -32,7 +31,6 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -41,6 +39,7 @@ import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.discovery.zen.membership.MembershipAction; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -50,6 +49,8 @@ import org.junit.Before; import org.junit.BeforeClass; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -68,6 +69,7 @@ import static java.util.Collections.emptySet; import static java.util.Collections.shuffle; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.setState; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -99,10 +101,9 @@ public class NodeJoinControllerTests extends ESTestCase { // make sure we have a master setState(clusterService, ClusterState.builder(clusterService.state()).nodes( DiscoveryNodes.builder(initialNodes).masterNodeId(localNode.getId()))); - nodeJoinController = new NodeJoinController(clusterService, new NoopRoutingService(Settings.EMPTY), - new ElectMasterService(Settings.EMPTY), - new DiscoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), - Settings.EMPTY); + nodeJoinController = new NodeJoinController(clusterService, new NoopAllocationService(Settings.EMPTY), + new ElectMasterService(Settings.EMPTY), new DiscoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, + ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), Settings.EMPTY); } @After @@ -198,17 +199,19 @@ public class NodeJoinControllerTests extends ESTestCase { final SimpleFuture 
electionFuture = new SimpleFuture("master election"); final Thread masterElection = new Thread(new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - logger.error("unexpected error from waitToBeElectedAsMaster", t); - electionFuture.markAsFailed(t); + public void onFailure(Exception e) { + logger.error("unexpected error from waitToBeElectedAsMaster", e); + electionFuture.markAsFailed(e); } @Override protected void doRun() throws Exception { - nodeJoinController.waitToBeElectedAsMaster(requiredJoins, TimeValue.timeValueHours(30), new NodeJoinController.ElectionCallback() { + nodeJoinController.waitToBeElectedAsMaster(requiredJoins, TimeValue.timeValueHours(30), + new NodeJoinController.ElectionCallback() { @Override public void onElectedAsMaster(ClusterState state) { - assertThat("callback called with elected as master, but state disagrees", state.nodes().isLocalNodeElectedMaster(), equalTo(true)); + assertThat("callback called with elected as master, but state disagrees", state.nodes().isLocalNodeElectedMaster(), + equalTo(true)); electionFuture.markAsDone(); } @@ -246,17 +249,19 @@ public class NodeJoinControllerTests extends ESTestCase { final SimpleFuture electionFuture = new SimpleFuture("master election"); final Thread masterElection = new Thread(new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - logger.error("unexpected error from waitToBeElectedAsMaster", t); - electionFuture.markAsFailed(t); + public void onFailure(Exception e) { + logger.error("unexpected error from waitToBeElectedAsMaster", e); + electionFuture.markAsFailed(e); } @Override protected void doRun() throws Exception { - nodeJoinController.waitToBeElectedAsMaster(requiredJoins, TimeValue.timeValueHours(30), new NodeJoinController.ElectionCallback() { + nodeJoinController.waitToBeElectedAsMaster(requiredJoins, TimeValue.timeValueHours(30), + new NodeJoinController.ElectionCallback() { @Override public void onElectedAsMaster(ClusterState state) { - assertThat("callback called with elected as master, but state disagrees", state.nodes().isLocalNodeElectedMaster(), equalTo(true)); + assertThat("callback called with elected as master, but state disagrees", state.nodes().isLocalNodeElectedMaster(), + equalTo(true)); electionFuture.markAsDone(); } @@ -298,7 +303,8 @@ public class NodeJoinControllerTests extends ESTestCase { } logger.debug("--> asserting master election didn't finish yet"); - assertThat("election finished after [" + initialJoins + "] master nodes but required joins is [" + requiredJoins + "]", electionFuture.isDone(), equalTo(false)); + assertThat("election finished after [" + initialJoins + "] master nodes but required joins is [" + requiredJoins + "]", + electionFuture.isDone(), equalTo(false)); final int finalJoins = requiredJoins - initialJoins + randomInt(5); nodesToJoin.clear(); @@ -374,7 +380,8 @@ public class NodeJoinControllerTests extends ESTestCase { nodeJoinController.waitToBeElectedAsMaster(requiredJoins, TimeValue.timeValueMillis(1), new NodeJoinController.ElectionCallback() { @Override public void onElectedAsMaster(ClusterState state) { - assertThat("callback called with elected as master, but state disagrees", state.nodes().isLocalNodeElectedMaster(), equalTo(true)); + assertThat("callback called with elected as master, but state disagrees", state.nodes().isLocalNodeElectedMaster(), + equalTo(true)); latch.countDown(); } @@ -403,7 +410,7 @@ public class NodeJoinControllerTests extends ESTestCase { public void testNewClusterStateOnExistingNodeJoin() 
throws InterruptedException, ExecutionException { ClusterState state = clusterService.state(); final DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(state.nodes()); - final DiscoveryNode other_node = new DiscoveryNode("other_node", DummyTransportAddress.INSTANCE, + final DiscoveryNode other_node = new DiscoveryNode("other_node", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); nodesBuilder.put(other_node); setState(clusterService, ClusterState.builder(state).nodes(nodesBuilder)); @@ -425,9 +432,9 @@ public class NodeJoinControllerTests extends ESTestCase { nodes.add(node); threads[i] = new Thread(new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - logger.error("unexpected error in join thread", t); - backgroundExceptions.add(t); + public void onFailure(Exception e) { + logger.error("unexpected error in join thread", e); + backgroundExceptions.add(e); } @Override @@ -468,9 +475,9 @@ public class NodeJoinControllerTests extends ESTestCase { nodes.add(node); threads[i] = new Thread(new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - logger.error("unexpected error in join thread", t); - backgroundExceptions.add(t); + public void onFailure(Exception e) { + logger.error("unexpected error in join thread", e); + backgroundExceptions.add(e); } @Override @@ -492,7 +499,8 @@ public class NodeJoinControllerTests extends ESTestCase { nodeJoinController.waitToBeElectedAsMaster(requiredJoins, TimeValue.timeValueHours(30), new NodeJoinController.ElectionCallback() { @Override public void onElectedAsMaster(ClusterState state) { - assertThat("callback called with elected as master, but state disagrees", state.nodes().isLocalNodeElectedMaster(), equalTo(true)); + assertThat("callback called with elected as master, but state disagrees", state.nodes().isLocalNodeElectedMaster(), + equalTo(true)); latch.countDown(); } @@ -515,17 +523,37 @@ public class NodeJoinControllerTests extends ESTestCase { assertNodesInCurrentState(nodes); } + public void testRejectingJoinWithSameAddressButDifferentId() throws InterruptedException, ExecutionException { + ClusterState state = clusterService.state(); + final DiscoveryNode other_node = new DiscoveryNode("other_node", state.nodes().getLocalNode().getAddress(), + emptyMap(), emptySet(), Version.CURRENT); - static class NoopRoutingService extends RoutingService { + ExecutionException e = expectThrows(ExecutionException.class, () -> joinNode(other_node)); + assertThat(e.getMessage(), containsString("found existing node")); + } - public NoopRoutingService(Settings settings) { - super(settings, null, new NoopAllocationService(settings)); - } + public void testRejectingJoinWithSameIdButDifferentAddress() throws InterruptedException, ExecutionException { + ClusterState state = clusterService.state(); + final DiscoveryNode other_node = new DiscoveryNode(state.nodes().getLocalNode().getId(), + new LocalTransportAddress(randomAsciiOfLength(20)), emptyMap(), emptySet(), Version.CURRENT); - @Override - protected void performReroute(String reason) { + ExecutionException e = expectThrows(ExecutionException.class, () -> joinNode(other_node)); + assertThat(e.getMessage(), containsString("found existing node")); + } - } + public void testJoinWithSameIdSameAddressButDifferentMeta() throws InterruptedException, ExecutionException { + ClusterState state = clusterService.state(); + final DiscoveryNode localNode = state.nodes().getLocalNode(); + final DiscoveryNode other_node = new DiscoveryNode( + 
randomBoolean() ? localNode.getName() : "other_name", + localNode.getId(), localNode.getAddress(), + randomBoolean() ? localNode.getAttributes() : Collections.singletonMap("attr", "other"), + randomBoolean() ? localNode.getRoles() : new HashSet<>(randomSubsetOf(Arrays.asList(DiscoveryNode.Role.values()))), + randomBoolean() ? localNode.getVersion() : VersionUtils.randomVersion(random())); + + joinNode(other_node); + + assertThat(clusterService.localNode(), equalTo(other_node)); } static class NoopAllocationService extends AllocationService { @@ -535,12 +563,14 @@ public class NodeJoinControllerTests extends ESTestCase { } @Override - public RoutingAllocation.Result applyStartedShards(ClusterState clusterState, List startedShards, boolean withReroute) { + public RoutingAllocation.Result applyStartedShards(ClusterState clusterState, List startedShards, + boolean withReroute) { return new RoutingAllocation.Result(false, clusterState.routingTable(), clusterState.metaData()); } @Override - public RoutingAllocation.Result applyFailedShards(ClusterState clusterState, List failedShards) { + public RoutingAllocation.Result applyFailedShards(ClusterState clusterState, + List failedShards) { return new RoutingAllocation.Result(false, clusterState.routingTable(), clusterState.metaData()); } @@ -581,7 +611,7 @@ public class NodeJoinControllerTests extends ESTestCase { } } - final static AtomicInteger joinId = new AtomicInteger(); + static final AtomicInteger joinId = new AtomicInteger(); private SimpleFuture joinNodeAsync(final DiscoveryNode node) throws InterruptedException { final SimpleFuture future = new SimpleFuture("join of " + node + " (id [" + joinId.incrementAndGet() + "]"); @@ -596,9 +626,9 @@ public class NodeJoinControllerTests extends ESTestCase { } @Override - public void onFailure(Throwable t) { - logger.error("unexpected error for {}", t, future); - future.markAsFailed(t); + public void onFailure(Exception e) { + logger.error("unexpected error for {}", e, future); + future.markAsFailed(e); } }); return future; @@ -608,8 +638,8 @@ public class NodeJoinControllerTests extends ESTestCase { * creates an object clone of node, so it will be a different object instance */ private DiscoveryNode cloneNode(DiscoveryNode node) { - return new DiscoveryNode(node.getName(), node.getId(), node.getHostName(), node.getHostAddress(), node.getAddress(), - node.getAttributes(), node.getRoles(), node.getVersion()); + return new DiscoveryNode(node.getName(), node.getId(), node.getEphemeralId(), node.getHostName(), node.getHostAddress(), + node.getAddress(), node.getAttributes(), node.getRoles(), node.getVersion()); } private void joinNode(final DiscoveryNode node) throws InterruptedException, ExecutionException { diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java index c5f92e62de0..3d0d9ddd8b1 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java @@ -261,8 +261,8 @@ public class ZenDiscoveryIT extends ESIntegTestCase { } @Override - public void onFailure(Throwable t) { - holder.set((IllegalStateException) t); + public void onFailure(Exception e) { + holder.set((IllegalStateException) e); } }); @@ -297,7 +297,7 @@ public class ZenDiscoveryIT extends ESIntegTestCase { Settings nodeSettings = Settings.builder() .put("discovery.type", "zen") // <-- To override the local setting if set externally 
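The onFailure(Throwable) → onFailure(Exception) edits that recur across these test hunks all trace back to one narrowed callback contract. Below is a minimal sketch of that shape, assuming a simplified stand-in for the real class (SketchRunnable and its members are illustrative names, not org.elasticsearch.common.util.concurrent.AbstractRunnable):

```java
// Illustrative stand-in showing the narrowed failure contract these hunks
// migrate to; the real AbstractRunnable has additional hooks (onAfter, etc.).
abstract class SketchRunnable implements Runnable {
    @Override
    public final void run() {
        try {
            doRun();
        } catch (Exception e) { // was: catch (Throwable t) before the change
            onFailure(e);
        }
        // Errors (OutOfMemoryError, AssertionError, ...) now escape instead
        // of being routed into the failure callback.
    }

    protected abstract void doRun() throws Exception;

    public abstract void onFailure(Exception e); // was: onFailure(Throwable t)
}
```

Narrowing the parameter means fatal Errors no longer disappear into logging callbacks, which is also why catch blocks in these tests widen to `AssertionError | Exception` where assertion failures from background threads must still be reported.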
.build(); - String nodeName = internalCluster().startNode(nodeSettings, Version.CURRENT); + String nodeName = internalCluster().startNode(nodeSettings); ZenDiscovery zenDiscovery = (ZenDiscovery) internalCluster().getInstance(Discovery.class, nodeName); ClusterService clusterService = internalCluster().getInstance(ClusterService.class, nodeName); DiscoveryNode node = new DiscoveryNode("_node_id", new InetSocketTransportAddress(InetAddress.getByName("0.0.0.0"), 0), @@ -309,8 +309,8 @@ public class ZenDiscoveryIT extends ESIntegTestCase { } @Override - public void onFailure(Throwable t) { - holder.set((IllegalStateException) t); + public void onFailure(Exception e) { + holder.set((IllegalStateException) e); } }); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java index a6638eb19cf..9db83f48f0e 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java @@ -24,8 +24,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.discovery.zen.ping.ZenPing; import org.elasticsearch.test.ESTestCase; @@ -51,9 +50,9 @@ public class ZenDiscoveryUnitTests extends ESTestCase { ClusterName clusterName = new ClusterName("abc"); DiscoveryNodes.Builder currentNodes = DiscoveryNodes.builder(); - currentNodes.masterNodeId("a").put(new DiscoveryNode("a", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT)); + currentNodes.masterNodeId("a").put(new DiscoveryNode("a", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT)); DiscoveryNodes.Builder newNodes = DiscoveryNodes.builder(); - newNodes.masterNodeId("a").put(new DiscoveryNode("a", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT)); + newNodes.masterNodeId("a").put(new DiscoveryNode("a", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT)); ClusterState.Builder currentState = ClusterState.builder(clusterName); currentState.nodes(currentNodes); @@ -71,7 +70,7 @@ public class ZenDiscoveryUnitTests extends ESTestCase { assertFalse("should not ignore, because new state's version is higher to current state's version", shouldIgnoreOrRejectNewClusterState(logger, currentState.build(), newState.build())); currentNodes = DiscoveryNodes.builder(); - currentNodes.masterNodeId("b").put(new DiscoveryNode("b", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT)); + currentNodes.masterNodeId("b").put(new DiscoveryNode("b", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT)); ; // version isn't taken into account, so randomize it to ensure this. 
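The swap from the shared DummyTransportAddress.INSTANCE singleton to LocalTransportAddress.buildUnique() throughout these hunks matters because node-join validation now rejects a second node reusing an existing address (see testRejectingJoinWithSameAddressButDifferentId above). A hedged sketch of the uniqueness contract, with purely illustrative names:

```java
import java.util.concurrent.atomic.AtomicLong;

// Hypothetical stand-in for the buildUnique() contract: every call yields an
// address no other test node shares, so same-address join checks never trip.
final class UniqueAddressSketch {
    private static final AtomicLong IDS = new AtomicLong();

    static String buildUnique() {
        return "local[" + IDS.incrementAndGet() + "]";
    }

    public static void main(String[] args) {
        System.out.println(buildUnique()); // local[1]
        System.out.println(buildUnique()); // local[2] -- no collision
    }
}
```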
if (randomBoolean()) { @@ -109,7 +108,7 @@ public class ZenDiscoveryUnitTests extends ESTestCase { ArrayList allNodes = new ArrayList<>(); for (int i = randomIntBetween(10, 20); i >= 0; i--) { Set roles = new HashSet<>(randomSubsetOf(Arrays.asList(DiscoveryNode.Role.values()))); - DiscoveryNode node = new DiscoveryNode("node_" + i, "id_" + i, DummyTransportAddress.INSTANCE, Collections.emptyMap(), + DiscoveryNode node = new DiscoveryNode("node_" + i, "id_" + i, LocalTransportAddress.buildUnique(), Collections.emptyMap(), roles, Version.CURRENT); responses.add(new ZenPing.PingResponse(node, randomBoolean() ? null : node, new ClusterName("test"), randomBoolean())); allNodes.add(node); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenPingTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenPingTests.java index 8aa5114c387..72674f44e3d 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenPingTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenPingTests.java @@ -23,7 +23,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.discovery.zen.ping.ZenPing; import org.elasticsearch.test.ESTestCase; @@ -42,7 +42,7 @@ public class ZenPingTests extends ESTestCase { boolean hasJoinedOncePerNode[] = new boolean[nodes.length]; ArrayList pings = new ArrayList<>(); for (int i = 0; i < nodes.length; i++) { - nodes[i] = new DiscoveryNode("" + i, DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + nodes[i] = new DiscoveryNode("" + i, LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); } for (int pingCount = scaledRandomIntBetween(10, nodes.length * 10); pingCount > 0; pingCount--) { diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java index f072c5faf8a..7715749fdf6 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.discovery.zen.ping.unicast; import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -31,6 +30,7 @@ import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.discovery.zen.ping.PingContextProvider; import org.elasticsearch.discovery.zen.ping.ZenPing; @@ -43,7 +43,6 @@ import org.elasticsearch.transport.TransportConnectionListener; import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportSettings; import org.elasticsearch.transport.netty.NettyTransport; -import org.jboss.netty.util.internal.ConcurrentHashMap; import 
java.net.InetSocketAddress; import java.util.concurrent.ConcurrentMap; @@ -209,7 +208,7 @@ public class UnicastZenPingIT extends ESTestCase { final TransportService transportService = new TransportService(settings, transport, threadPool); transportService.start(); transportService.acceptIncomingRequests(); - ConcurrentMap counters = new ConcurrentHashMap<>(); + ConcurrentMap counters = ConcurrentCollections.newConcurrentMap(); transportService.addConnectionListener(new TransportConnectionListener() { @Override public void onNodeConnected(DiscoveryNode node) { diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java index ab9aed6ba44..42aa792c95f 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java @@ -25,7 +25,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.discovery.zen.publish.PendingClusterStatesQueue.ClusterStateContext; import org.elasticsearch.test.ESTestCase; @@ -237,7 +237,7 @@ public class PendingClusterStatesQueueTests extends ESTestCase { ClusterState state = lastClusterStatePerMaster[masterIndex]; if (state == null) { state = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).nodes(DiscoveryNodes.builder() - .put(new DiscoveryNode(masters[masterIndex], DummyTransportAddress.INSTANCE, + .put(new DiscoveryNode(masters[masterIndex], LocalTransportAddress.buildUnique(), emptyMap(), emptySet(),Version.CURRENT)).masterNodeId(masters[masterIndex]).build() ).build(); } else { @@ -259,8 +259,8 @@ public class PendingClusterStatesQueueTests extends ESTestCase { } @Override - public void onNewClusterStateFailed(Throwable t) { - failure = t; + public void onNewClusterStateFailed(Exception e) { + failure = e; } } diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java index 61374cc0d8f..0d4274a5a53 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java @@ -43,6 +43,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.DiscoveryNodesProvider; +import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.node.Node; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; @@ -152,16 +153,17 @@ public class PublishClusterStateActionTests extends ESTestCase { return createMockNode(name, settings, null); } - public MockNode createMockNode(String name, Settings settings, @Nullable ClusterStateListener listener) throws Exception { - settings = Settings.builder() + public MockNode createMockNode(String name, final Settings basSettings, @Nullable ClusterStateListener 
listener) throws Exception { + final Settings settings = Settings.builder() .put("name", name) .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "", TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") - .put(settings) + .put(basSettings) .build(); MockTransportService service = buildTransportService(settings); DiscoveryNodeService discoveryNodeService = new DiscoveryNodeService(settings); - DiscoveryNode discoveryNode = discoveryNodeService.buildLocalNode(service.boundAddress().publishAddress()); + DiscoveryNode discoveryNode = discoveryNodeService.buildLocalNode(service.boundAddress().publishAddress(), + () -> NodeEnvironment.generateNodeId(settings)); MockNode node = new MockNode(discoveryNode, service, listener, logger); node.action = buildPublishClusterStateAction(settings, service, () -> node.clusterState, node); final CountDownLatch latch = new CountDownLatch(nodes.size() * 2 + 1); @@ -797,9 +799,9 @@ public class PublishClusterStateActionTests extends ESTestCase { } @Override - public void onNodeAck(DiscoveryNode node, @Nullable Throwable t) { - if (t != null) { - errors.add(new Tuple<>(node, t)); + public void onNodeAck(DiscoveryNode node, @Nullable Exception e) { + if (e != null) { + errors.add(new Tuple<>(node, e)); } countDown.countDown(); } @@ -910,8 +912,8 @@ public class PublishClusterStateActionTests extends ESTestCase { } @Override - public void sendResponse(Throwable error) throws IOException { - this.error.set(error); + public void sendResponse(Exception exception) throws IOException { + this.error.set(exception); assertThat(response.get(), nullValue()); } diff --git a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java index ad425d8afc9..50e05d97985 100644 --- a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java +++ b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java @@ -22,7 +22,6 @@ import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.store.LockObtainFailedException; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.LuceneTestCase; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; @@ -48,12 +47,11 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileExists; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileNotExists; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.not; @LuceneTestCase.SuppressFileSystems("ExtrasFS") // TODO: fix test to allow extras public class NodeEnvironmentTests extends ESTestCase { @@ -269,9 +267,9 @@ public class NodeEnvironmentTests extends ESTestCase { if (randomBoolean()) { Thread t = new Thread(new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - logger.error("unexpected error", t); - threadException.set(t); + public void onFailure(Exception e) { + logger.error("unexpected error", e); + threadException.set(e); latch.countDown(); blockLatch.countDown(); } @@ -392,7 +390,7 @@ public class NodeEnvironmentTests extends 
ESTestCase { env.close(); NodeEnvironment env2 = newNodeEnvironment(dataPaths, "/tmp", - Settings.builder().put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), false).build()); + Settings.builder().put(NodeEnvironment.ADD_NODE_LOCK_ID_TO_CUSTOM_PATH.getKey(), false).build()); assertThat(env2.availableShardPaths(sid), equalTo(env2.availableShardPaths(sid))); assertThat(env2.resolveCustomLocation(s2, sid), equalTo(PathUtils.get("/tmp/foo/" + index.getUUID() + "/0"))); @@ -450,6 +448,27 @@ public class NodeEnvironmentTests extends ESTestCase { } } + public void testPersistentNodeId() throws IOException { + String[] paths = tmpPaths(); + NodeEnvironment env = newNodeEnvironment(paths, Settings.builder() + .put("node.local_storage", false) + .put("node.master", false) + .put("node.data", false) + .build()); + String nodeID = env.nodeId(); + env.close(); + env = newNodeEnvironment(paths, Settings.EMPTY); + assertThat("previous node didn't have local storage enabled, id should change", env.nodeId(), not(equalTo(nodeID))); + nodeID = env.nodeId(); + env.close(); + env = newNodeEnvironment(paths, Settings.EMPTY); + assertThat(env.nodeId(), equalTo(nodeID)); + env.close(); + env = newNodeEnvironment(Settings.EMPTY); + assertThat(env.nodeId(), not(equalTo(nodeID))); + env.close(); + } + /** Converts an array of Strings to an array of Paths, adding an additional child if specified */ private Path[] stringsToPaths(String[] strings, String additional) { Path[] locations = new Path[strings.length]; diff --git a/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java b/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java index 9ce2aa44ab6..092e6eaff8a 100644 --- a/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java @@ -24,7 +24,7 @@ import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -45,11 +45,11 @@ import static org.hamcrest.Matchers.sameInstance; /** */ public class AsyncShardFetchTests extends ESTestCase { - private final DiscoveryNode node1 = new DiscoveryNode("node1", DummyTransportAddress.INSTANCE, Collections.emptyMap(), + private final DiscoveryNode node1 = new DiscoveryNode("node1", LocalTransportAddress.buildUnique(), Collections.emptyMap(), Collections.singleton(DiscoveryNode.Role.DATA), Version.CURRENT); private final Response response1 = new Response(node1); private final Throwable failure1 = new Throwable("simulated failure 1"); - private final DiscoveryNode node2 = new DiscoveryNode("node2", DummyTransportAddress.INSTANCE, Collections.emptyMap(), + private final DiscoveryNode node2 = new DiscoveryNode("node2", LocalTransportAddress.buildUnique(), Collections.emptyMap(), Collections.singleton(DiscoveryNode.Role.DATA), Version.CURRENT); private final Response response2 = new Response(node2); private final Throwable failure2 = new Throwable("simulate failure 2"); @@ -270,8 +270,9 @@ public class AsyncShardFetchTests extends ESTestCase { } @Override - protected void asyncFetch(final ShardId shardId, String[] nodesIds) { - for 
(final String nodeId : nodesIds) { + protected void asyncFetch(final ShardId shardId, DiscoveryNode[] nodes) { + for (final DiscoveryNode node : nodes) { + final String nodeId = node.getId(); threadPool.generic().execute(new Runnable() { @Override public void run() { @@ -291,7 +292,7 @@ public class AsyncShardFetchTests extends ESTestCase { } else { processAsyncFetch(shardId, Collections.singletonList(entry.response), null); } - } catch (Throwable e) { + } catch (Exception e) { logger.error("unexpected failure", e); } finally { if (entry != null) { diff --git a/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java b/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java index d5dee3c1bdc..f86b56f1052 100644 --- a/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java @@ -298,15 +298,10 @@ public class GatewayIndexStateIT extends ESIntegTestCase { assertThat(client().prepareGet("test", "type1", "1").execute().actionGet().isExists(), equalTo(true)); logger.info("--> restarting the nodes"); - final Gateway gateway1 = internalCluster().getInstance(GatewayService.class, node_1).getGateway(); internalCluster().fullRestart(new RestartCallback() { @Override - public Settings onNodeStopped(String nodeName) throws Exception { - if (node_1.equals(nodeName)) { - logger.info("--> deleting the data for the first node"); - gateway1.reset(); - } - return null; + public boolean clearData(String nodeName) { + return node_1.equals(nodeName); } }); @@ -348,10 +343,10 @@ public class GatewayIndexStateIT extends ESIntegTestCase { } else { // test with a shadow replica index final Path dataPath = createTempDir(); - logger.info("--> created temp data path for shadow replicas [" + dataPath + "]"); + logger.info("--> created temp data path for shadow replicas [{}]", dataPath); logger.info("--> starting a cluster with " + numNodes + " nodes"); final Settings nodeSettings = Settings.builder() - .put("node.add_id_to_custom_path", false) + .put("node.add_lock_id_to_custom_path", false) .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), dataPath.toString()) .put("index.store.fs.fs_lock", randomFrom("native", "simple")) .build(); @@ -431,7 +426,7 @@ public class GatewayIndexStateIT extends ESIntegTestCase { // this one is not validated ahead of time and breaks allocation .put("index.analysis.filter.myCollator.type", "icu_collation") ).build(); - IndexMetaData.FORMAT.write(brokenMeta, brokenMeta.getVersion(), services.indexPaths(brokenMeta.getIndex())); + IndexMetaData.FORMAT.write(brokenMeta, services.indexPaths(brokenMeta.getIndex())); } internalCluster().fullRestart(); // ensureGreen(closedIndex) waits for the index to show up in the metadata @@ -446,13 +441,6 @@ public class GatewayIndexStateIT extends ESIntegTestCase { assertNotNull(ex.getCause()); assertEquals(IllegalArgumentException.class, ex.getCause().getClass()); assertEquals(ex.getCause().getMessage(), "Unknown tokenfilter type [icu_collation] for [myCollator]"); - - client().admin().indices().prepareUpdateSettings() - .setSettings(Settings.builder().putNull("index.analysis.filter.myCollator.type")).get(); - client().admin().indices().prepareOpen("test").get(); - ensureYellow(); - logger.info("--> verify 1 doc in the index"); - assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L); } /** @@ -495,7 +483,7 @@ public class GatewayIndexStateIT extends ESIntegTestCase { for (NodeEnvironment services : 
internalCluster().getInstances(NodeEnvironment.class)) { IndexMetaData brokenMeta = IndexMetaData.builder(metaData).settings(metaData.getSettings() .filter((s) -> "index.analysis.analyzer.test.tokenizer".equals(s) == false)).build(); - IndexMetaData.FORMAT.write(brokenMeta, brokenMeta.getVersion(), services.indexPaths(brokenMeta.getIndex())); + IndexMetaData.FORMAT.write(brokenMeta, services.indexPaths(brokenMeta.getIndex())); } internalCluster().fullRestart(); // ensureGreen(closedIndex) waits for the index to show up in the metadata @@ -510,13 +498,6 @@ public class GatewayIndexStateIT extends ESIntegTestCase { assertNotNull(ex.getCause()); assertEquals(MapperParsingException.class, ex.getCause().getClass()); assertEquals(ex.getCause().getMessage(), "analyzer [test] not found for field [field1]"); - - client().admin().indices().prepareUpdateSettings() - .setSettings(Settings.builder().put("index.analysis.analyzer.test.tokenizer", "keyword")).get(); - client().admin().indices().prepareOpen("test").get(); - ensureYellow(); - logger.info("--> verify 1 doc in the index"); - assertHitCount(client().prepareSearch().setQuery(matchQuery("field1", "value one")).get(), 1L); } public void testArchiveBrokenClusterSettings() throws Exception { @@ -540,7 +521,7 @@ public class GatewayIndexStateIT extends ESIntegTestCase { MetaData brokenMeta = MetaData.builder(metaData).persistentSettings(Settings.builder() .put(metaData.persistentSettings()).put("this.is.unknown", true) .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), "broken").build()).build(); - MetaData.FORMAT.write(brokenMeta, metaData.version(), nodeEnv.nodeDataPaths()); + MetaData.FORMAT.write(brokenMeta, nodeEnv.nodeDataPaths()); } internalCluster().fullRestart(); ensureYellow("test"); // wait for state recovery diff --git a/core/src/test/java/org/elasticsearch/gateway/GatewayServiceTests.java b/core/src/test/java/org/elasticsearch/gateway/GatewayServiceTests.java index 4f65c5fafdd..1b62f5d330a 100644 --- a/core/src/test/java/org/elasticsearch/gateway/GatewayServiceTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/GatewayServiceTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.gateway; -import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -40,7 +39,7 @@ public class GatewayServiceTests extends ESTestCase { .put("http.enabled", "false") .put("discovery.type", "local") .put(settings.build()).build(), - null, clusterService, null, null, null, null, new NoopDiscovery(), null, null); + null, clusterService, null, null, null, new NoopDiscovery(), null, null); } public void testDefaultRecoverAfterTime() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java index 41eba406009..4cf505d839a 100644 --- a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java @@ -104,7 +104,7 @@ public class MetaDataStateFormatTests extends ESTestCase { Format format = new Format(randomFrom(XContentType.values()), "foo-"); DummyState state = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean()); int version = between(0, Integer.MAX_VALUE/2); - format.write(state, 
version, dirs); + format.write(state, dirs); for (Path file : dirs) { Path[] list = content("*", file); assertEquals(list.length, 1); @@ -119,7 +119,7 @@ public class MetaDataStateFormatTests extends ESTestCase { } final int version2 = between(version, Integer.MAX_VALUE); DummyState state2 = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean()); - format.write(state2, version2, dirs); + format.write(state2, dirs); for (Path file : dirs) { Path[] list = content("*", file); @@ -146,7 +146,7 @@ public class MetaDataStateFormatTests extends ESTestCase { Format format = new Format(randomFrom(XContentType.values()), "foo-"); DummyState state = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean()); int version = between(0, Integer.MAX_VALUE/2); - format.write(state, version, dirs); + format.write(state, dirs); for (Path file : dirs) { Path[] list = content("*", file); assertEquals(list.length, 1); @@ -170,7 +170,7 @@ public class MetaDataStateFormatTests extends ESTestCase { Format format = new Format(randomFrom(XContentType.values()), "foo-"); DummyState state = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean()); int version = between(0, Integer.MAX_VALUE/2); - format.write(state, version, dirs); + format.write(state, dirs); for (Path file : dirs) { Path[] list = content("*", file); assertEquals(list.length, 1); @@ -261,7 +261,7 @@ public class MetaDataStateFormatTests extends ESTestCase { } } for (int j = numLegacy; j < numStates; j++) { - format.write(meta.get(j), j, dirs[i]); + format.write(meta.get(j), dirs[i]); if (randomBoolean() && (j < numStates - 1 || dirs.length > 0 && i != 0)) { // corrupt a file that we do not necessarily need here.... 
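Every format.write(state, version, dirs) call in this file loses its version argument because the state format now manages its own generation internally. A sketch of that API shape under stated assumptions (StateFormatSketch is hypothetical, not the real MetaDataStateFormat):

```java
import java.io.IOException;
import java.nio.file.Path;

// Hypothetical sketch: write() no longer accepts a caller-supplied version
// and instead advances a generation counter owned by the format itself.
abstract class StateFormatSketch<T> {
    private long generation = -1;

    // before: public void write(T state, long version, Path... locations)
    public final void write(T state, Path... locations) throws IOException {
        final long nextGeneration = ++generation; // derived internally now
        for (Path location : locations) {
            writeTo(state, nextGeneration, location);
        }
    }

    protected abstract void writeTo(T state, long generation, Path location) throws IOException;
}
```

Dropping the parameter removes a class of test bugs where callers had to invent monotonically increasing versions by hand, as the between(...) juggling deleted above illustrates.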
Path file = dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + j + ".st"); corruptedFiles.add(file); diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java b/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java index 4999ef5eac5..795046ba10c 100644 --- a/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java @@ -161,8 +161,8 @@ public class MetaDataWriteDataNodesIT extends ESIntegTestCase { logger.info("checking if meta state exists..."); try { assertTrue("Expecting meta state of index " + indexName + " to be on node " + nodeName, getIndicesMetaDataOnNode(nodeName).containsKey(indexName)); - } catch (Throwable t) { - logger.info("failed to load meta state", t); + } catch (Exception e) { + logger.info("failed to load meta state", e); fail("could not load meta state"); } } diff --git a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java index 96e360550af..e64c816c4bf 100644 --- a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RestoreSource; @@ -41,6 +40,7 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardStateMetaData; import org.elasticsearch.snapshots.Snapshot; +import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.test.ESAllocationTestCase; import org.junit.Before; @@ -547,7 +547,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { return addData(node, version, allocationId, primary, null); } - public TestAllocator addData(DiscoveryNode node, long version, String allocationId, boolean primary, @Nullable Throwable storeException) { + public TestAllocator addData(DiscoveryNode node, long version, String allocationId, boolean primary, @Nullable Exception storeException) { if (data == null) { data = new HashMap<>(); } diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoverAfterNodesIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoverAfterNodesIT.java index d6e8d61a7a6..1e35bcdd469 100644 --- a/core/src/test/java/org/elasticsearch/gateway/RecoverAfterNodesIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/RecoverAfterNodesIT.java @@ -36,7 +36,7 @@ import static org.hamcrest.Matchers.hasItem; @ClusterScope(scope = Scope.TEST, numDataNodes = 0) public class RecoverAfterNodesIT extends ESIntegTestCase { - private final static TimeValue BLOCK_WAIT_TIMEOUT = TimeValue.timeValueSeconds(10); + private static final TimeValue BLOCK_WAIT_TIMEOUT = TimeValue.timeValueSeconds(10); public Set waitForNoBlocksOnNode(TimeValue timeout, Client nodeClient) throws InterruptedException { long start = System.currentTimeMillis(); diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java 
b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java index aaa29ad1970..59f01f56ce1 100644 --- a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java @@ -565,7 +565,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { TransportNodesListGatewayStartedShards.NodesGatewayStartedShards response; response = internalCluster().getInstance(TransportNodesListGatewayStartedShards.class) - .execute(new TransportNodesListGatewayStartedShards.Request(shardId, new String[]{node.getId()})) + .execute(new TransportNodesListGatewayStartedShards.Request(shardId, new DiscoveryNode[]{node})) .get(); assertThat(response.getNodes(), hasSize(1)); diff --git a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java index 99d6c56c88d..b417553a609 100644 --- a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java @@ -36,11 +36,11 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.set.Sets; @@ -118,8 +118,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { public void testSimpleFullMatchAllocation() { RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders()); DiscoveryNode nodeToMatch = randomBoolean() ? node2 : node3; - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(nodeToMatch, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(nodeToMatch, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); testAllocator.allocateUnassigned(allocation); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(nodeToMatch.getId())); @@ -131,8 +131,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { public void testSyncIdMatch() { RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders()); DiscoveryNode nodeToMatch = randomBoolean() ? 
node2 : node3; - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(nodeToMatch, false, "MATCH", new StoreFileMetaData("file1", 10, "NO_MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(nodeToMatch, "MATCH", new StoreFileMetaData("file1", 10, "NO_MATCH_CHECKSUM")); testAllocator.allocateUnassigned(allocation); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(nodeToMatch.getId())); @@ -144,8 +144,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { public void testFileChecksumMatch() { RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders()); DiscoveryNode nodeToMatch = randomBoolean() ? node2 : node3; - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(nodeToMatch, false, "NO_MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(nodeToMatch, "NO_MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); testAllocator.allocateUnassigned(allocation); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(nodeToMatch.getId())); @@ -159,7 +159,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { */ public void testNoPrimaryData() { RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders()); - testAllocator.addData(node2, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node2, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); testAllocator.allocateUnassigned(allocation); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(1)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).get(0).shardId(), equalTo(shardId)); @@ -171,7 +171,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { */ public void testNoDataForReplicaOnAnyNode() { RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders()); - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); testAllocator.allocateUnassigned(allocation); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(1)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).get(0).shardId(), equalTo(shardId)); @@ -183,8 +183,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { */ public void testNoMatchingFilesForReplicaOnAnyNode() { RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders()); - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(node2, false, "NO_MATCH", new StoreFileMetaData("file1", 10, "NO_MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(node2, 
"NO_MATCH", new StoreFileMetaData("file1", 10, "NO_MATCH_CHECKSUM")); testAllocator.allocateUnassigned(allocation); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(1)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).get(0).shardId(), equalTo(shardId)); @@ -196,8 +196,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { */ public void testNoOrThrottleDecidersRemainsInUnassigned() { RoutingAllocation allocation = onePrimaryOnNode1And1Replica(randomBoolean() ? noAllocationDeciders() : throttleAllocationDeciders()); - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(node2, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(node2, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); testAllocator.allocateUnassigned(allocation); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); @@ -209,7 +209,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { */ public void testThrottleWhenAllocatingToMatchingNode() { RoutingAllocation allocation = onePrimaryOnNode1And1Replica(new AllocationDeciders(Settings.EMPTY, - new AllocationDecider[]{new TestAllocateDecision(Decision.YES), new AllocationDecider(Settings.EMPTY) { + new AllocationDecider[]{new TestAllocateDecision(Decision.YES), new SameShardAllocationDecider(Settings.EMPTY), + new AllocationDecider(Settings.EMPTY) { @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { if (node.node().equals(node2)) { @@ -218,8 +219,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { return Decision.YES; } }})); - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(node2, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(node2, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); testAllocator.allocateUnassigned(allocation); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); @@ -228,10 +229,10 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { public void testDelayedAllocation() { RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders(), Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1)).build(), UnassignedInfo.Reason.NODE_LEFT); - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); if (randomBoolean()) { // we sometime return empty list of files, make sure we test this as well - testAllocator.addData(node2, false, null); + testAllocator.addData(node2, null); } boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); @@ -240,7 +241,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { allocation = 
onePrimaryOnNode1And1Replica(yesAllocationDeciders(), Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1)).build(), UnassignedInfo.Reason.NODE_LEFT); - testAllocator.addData(node2, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node2, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); @@ -249,9 +250,9 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { public void testCancelRecoveryBetterSyncId() { RoutingAllocation allocation = onePrimaryOnNode1And1ReplicaRecovering(yesAllocationDeciders()); - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(node2, false, "NO_MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(node3, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(node2, "NO_MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(node3, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); boolean changed = testAllocator.processExistingRecoveries(allocation); assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(1)); @@ -260,9 +261,9 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { public void testNotCancellingRecoveryIfSyncedOnExistingRecovery() { RoutingAllocation allocation = onePrimaryOnNode1And1ReplicaRecovering(yesAllocationDeciders()); - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(node2, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(node3, false, randomBoolean() ? "MATCH" : "NO_MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(node2, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(node3, randomBoolean() ? 
"MATCH" : "NO_MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); boolean changed = testAllocator.processExistingRecoveries(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(0)); @@ -270,8 +271,8 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { public void testNotCancellingRecovery() { RoutingAllocation allocation = onePrimaryOnNode1And1ReplicaRecovering(yesAllocationDeciders()); - testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) - .addData(node2, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); + testAllocator.addData(node1, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")) + .addData(node2, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); boolean changed = testAllocator.processExistingRecoveries(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(0)); @@ -352,7 +353,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { return fetchDataCalled.getAndSet(false); } - public TestAllocator addData(DiscoveryNode node, boolean allocated, String syncId, StoreFileMetaData... files) { + public TestAllocator addData(DiscoveryNode node, String syncId, StoreFileMetaData... files) { if (data == null) { data = new HashMap<>(); } @@ -364,7 +365,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { if (syncId != null) { commitData.put(Engine.SYNC_COMMIT_ID, syncId); } - data.put(node, new TransportNodesListShardStoreMetaData.StoreFilesMetaData(allocated, shardId, + data.put(node, new TransportNodesListShardStoreMetaData.StoreFilesMetaData(shardId, new Store.MetadataSnapshot(unmodifiableMap(filesAsMap), unmodifiableMap(commitData), randomInt()))); return this; } diff --git a/core/src/test/java/org/elasticsearch/http/HttpServerTests.java b/core/src/test/java/org/elasticsearch/http/HttpServerTests.java index 2ba7da84c14..9a93cb4d7ea 100644 --- a/core/src/test/java/org/elasticsearch/http/HttpServerTests.java +++ b/core/src/test/java/org/elasticsearch/http/HttpServerTests.java @@ -18,9 +18,9 @@ */ package org.elasticsearch.http; -import org.elasticsearch.cluster.service.ClusterService; +import java.util.Map; + import org.elasticsearch.common.breaker.CircuitBreaker; -import org.elasticsearch.common.bytes.ByteBufferBytesReference; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractLifecycleComponent; @@ -33,7 +33,6 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; -import org.elasticsearch.node.service.NodeService; import org.elasticsearch.rest.AbstractRestChannel; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; @@ -43,10 +42,6 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.junit.Before; -import java.nio.ByteBuffer; -import java.nio.charset.StandardCharsets; -import java.util.Map; - public class HttpServerTests extends ESTestCase { private static final ByteSizeValue BREAKER_LIMIT = new ByteSizeValue(20); private HttpServer httpServer; @@ -66,17 
diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpClient.java b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpClient.java
index 264876b7963..0aeb00914e2 100644
--- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpClient.java
+++ b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpClient.java
@@ -104,7 +104,7 @@ public class NettyHttpClient implements Closeable {
}
@SafeVarargs // Safe not because it doesn't do anything with the type parameters but because it won't leak them into other methods.
- private final Collection processRequestsWithBody(HttpMethod method, SocketAddress remoteAddress, Tuple
+ private final Collection processRequestsWithBody(HttpMethod method, SocketAddress remoteAddress, Tuple... urisAndBodies) throws InterruptedException {
Collection requests = new ArrayList<>(urisAndBodies.length);
for (Tuple uriAndBody : urisAndBodies) {
diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java
index 6fc9a4e674a..ef8621dfcd3 100644
--- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java
+++ b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java
@@ -142,9 +142,9 @@ public class NettyHttpServerPipeliningTests extends ESTestCase {
}
@Override
- public HttpServerTransport stop() {
+ public void stop() {
executorService.shutdownNow();
- return super.stop();
+ super.stop();
}
}
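The pipelining-test hunk reflects a lifecycle API change: stop() no longer returns the component, it returns void. Any subclass that does extra cleanup follows the same pattern; a sketch under that assumption (executorService stands in for whatever resource the subclass owns):

    @Override
    public void stop() {
        executorService.shutdownNow(); // release test-owned resources first
        super.stop();                  // then run the normal lifecycle stop, no return value
    }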
diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java
index 6cd9c596a51..32e519d162b 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java
@@ -36,7 +36,6 @@ import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
-import org.elasticsearch.test.ClusterServiceUtils;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
@@ -73,6 +72,7 @@ import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.test.ClusterServiceUtils;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.TestSearchContext;
@@ -84,11 +84,12 @@ import org.elasticsearch.watcher.ResourceWatcherService;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
-import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
+import static java.util.Collections.emptyMap;
+
public class IndexModuleTests extends ESTestCase {
private Index index;
private Settings settings;
@@ -134,7 +135,7 @@ public class IndexModuleTests extends ESTestCase {
environment = new Environment(settings);
nodeServicesProvider = newNodeServiceProvider(settings, environment, null);
nodeEnvironment = new NodeEnvironment(settings, environment);
- mapperRegistry = new IndicesModule(new NamedWriteableRegistry()).getMapperRegistry();
+ mapperRegistry = new IndicesModule(new NamedWriteableRegistry(), Collections.emptyList()).getMapperRegistry();
}
@Override
@@ -147,7 +148,8 @@ public class IndexModuleTests extends ESTestCase {
}
public void testWrapperIsBound() throws IOException {
- IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment));
+ IndexModule module = new IndexModule(indexSettings, null,
+ new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
module.setSearcherWrapper((s) -> new Wrapper());
module.engineFactory.set(new MockEngineFactory(AssertingDirectoryReader.class));
IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache,
@@ -166,7 +168,8 @@ public class IndexModuleTests extends ESTestCase {
.put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), "foo_store")
.build();
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings);
- IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment));
+ IndexModule module = new IndexModule(indexSettings, null,
+ new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
module.addIndexStore("foo_store", FooStore::new);
try {
module.addIndexStore("foo_store", FooStore::new);
@@ -190,8 +193,8 @@ public class IndexModuleTests extends ESTestCase {
}
};
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings);
- IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment));
- Consumer<Settings> listener = (s) -> {};
+ IndexModule module = new IndexModule(indexSettings, null,
+ new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
module.addIndexEventListener(eventListener);
IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache,
mapperRegistry, shardId -> {}, new IndicesFieldDataCache(settings, this.listener));
@@ -206,7 +209,8 @@ public class IndexModuleTests extends ESTestCase {
public void testListener() throws IOException {
Setting<Boolean> booleanSetting = Setting.boolSetting("index.foo.bar", false, Property.Dynamic, Property.IndexScope);
- IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings, booleanSetting), null, new AnalysisRegistry(null, environment));
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings, booleanSetting), null,
+ new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
Setting<Boolean> booleanSetting2 = Setting.boolSetting("index.foo.bar.baz", false, Property.Dynamic, Property.IndexScope);
AtomicBoolean atomicBoolean = new AtomicBoolean(false);
module.addSettingsUpdateConsumer(booleanSetting, atomicBoolean::set);
@@ -226,7 +230,8 @@ public class IndexModuleTests extends ESTestCase {
}
public void testAddIndexOperationListener() throws IOException {
- IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), null, new AnalysisRegistry(null, environment));
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), null,
+ new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
AtomicBoolean executed = new AtomicBoolean(false);
IndexingOperationListener listener = new IndexingOperationListener() {
@Override
@@ -256,7 +261,8 @@ public class IndexModuleTests extends ESTestCase {
}
public void testAddSearchOperationListener() throws IOException {
- IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), null, new AnalysisRegistry(null, environment));
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), null,
+ new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
AtomicBoolean executed = new AtomicBoolean(false);
SearchOperationListener listener = new SearchOperationListener() {
@@ -291,7 +297,8 @@ public class IndexModuleTests extends ESTestCase {
.put("index.similarity.my_similarity.key", "there is a key")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment));
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null,
+ new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
module.addSimilarity("test_similarity", (string, settings) -> new SimilarityProvider() {
@Override
public String name() {
@@ -315,7 +322,8 @@ public class IndexModuleTests extends ESTestCase {
}
public void testFrozen() {
- IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), null, new AnalysisRegistry(null, environment));
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings), null,
+ new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
module.freeze();
String msg = "Can't modify IndexModule once the index service has been created";
assertEquals(msg, expectThrows(IllegalStateException.class, () -> module.addSearchOperationListener(null)).getMessage());
@@ -333,7 +341,8 @@ public class IndexModuleTests extends ESTestCase {
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
- IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment));
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null,
+ new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
try {
module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry,
shardId -> {}, new IndicesFieldDataCache(settings, listener));
@@ -348,7 +357,8 @@ public class IndexModuleTests extends ESTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
- IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment));
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null,
+ new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
try {
module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry,
shardId -> {}, new IndicesFieldDataCache(settings, listener));
@@ -361,7 +371,8 @@ public class IndexModuleTests extends ESTestCase {
Settings indexSettings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
- IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment));
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null,
+ new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
module.forceQueryCacheProvider((a, b) -> new CustomQueryCache());
expectThrows(AlreadySetException.class, () -> module.forceQueryCacheProvider((a, b) -> new CustomQueryCache()));
IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry,
@@ -374,7 +385,8 @@ public class IndexModuleTests extends ESTestCase {
Settings indexSettings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
- IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment));
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null,
+ new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry,
shardId -> {}, new IndicesFieldDataCache(settings, listener));
assertTrue(indexService.cache().query() instanceof IndexQueryCache);
@@ -386,7 +398,8 @@ public class IndexModuleTests extends ESTestCase {
.put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), false)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
- IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null, new AnalysisRegistry(null, environment));
+ IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings("foo", indexSettings), null,
+ new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap()));
module.forceQueryCacheProvider((a, b) -> new CustomQueryCache());
IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry,
shardId -> {}, new IndicesFieldDataCache(settings, listener));
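The IndexModuleTests hunks above repeat one mechanical rewrite: AnalysisRegistry is now built from an Environment plus four provider maps instead of a HunspellService. Isolated, the pattern looks like the sketch below; the map order (presumably char filters, token filters, tokenizers, analyzers) is inferred from these call sites, not stated in the patch:

    // Test-style construction with no custom analysis components registered.
    AnalysisRegistry registry = new AnalysisRegistry(environment,
        emptyMap(),  // char filter providers
        emptyMap(),  // token filter providers
        emptyMap(),  // tokenizer providers
        emptyMap()); // analyzer providers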
diff --git a/core/src/test/java/org/elasticsearch/index/IndexRequestBuilderIT.java b/core/src/test/java/org/elasticsearch/index/IndexRequestBuilderIT.java
index c41051ec59c..f95e8408a87 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexRequestBuilderIT.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexRequestBuilderIT.java
@@ -22,6 +22,7 @@ package org.elasticsearch.index;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
@@ -43,7 +44,7 @@ public class IndexRequestBuilderIT extends ESIntegTestCase {
client().prepareIndex("test", "test").setSource("{\"test_field\" : \"foobar\"}"),
client().prepareIndex("test", "test").setSource(new BytesArray("{\"test_field\" : \"foobar\"}")),
client().prepareIndex("test", "test").setSource(new BytesArray("{\"test_field\" : \"foobar\"}")),
- client().prepareIndex("test", "test").setSource(new BytesArray("{\"test_field\" : \"foobar\"}").toBytes()),
+ client().prepareIndex("test", "test").setSource(BytesReference.toBytes(new BytesArray("{\"test_field\" : \"foobar\"}"))),
client().prepareIndex("test", "test").setSource(map)
};
indexRandom(true, builders);
diff --git a/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java b/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java
index b27f50054ef..6272b14d1ab 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java
@@ -77,7 +77,6 @@ public class IndexServiceTests extends ESSingleNodeTestCase {
public void testFilteringAliases() throws Exception {
IndexService indexService = createIndex("test", Settings.EMPTY);
- IndexShard shard = indexService.getShard(0);
add(indexService, "cats", filter(termQuery("animal", "cat")));
add(indexService, "dogs", filter(termQuery("animal", "dog")));
add(indexService, "all", null);
@@ -101,7 +100,6 @@ public class IndexServiceTests extends ESSingleNodeTestCase {
public void testAliasFilters() throws Exception {
IndexService indexService = createIndex("test", Settings.EMPTY);
- IndexShard shard = indexService.getShard(0);
add(indexService, "cats", filter(termQuery("animal", "cat")));
add(indexService, "dogs", filter(termQuery("animal", "dog")));
@@ -118,7 +116,6 @@ public class IndexServiceTests extends ESSingleNodeTestCase {
public void testRemovedAliasFilter() throws Exception {
IndexService indexService = createIndex("test", Settings.EMPTY);
- IndexShard shard = indexService.getShard(0);
add(indexService, "cats", filter(termQuery("animal", "cat")));
remove(indexService, "cats");
@@ -132,7 +129,6 @@ public class IndexServiceTests extends ESSingleNodeTestCase {
public void testUnknownAliasFilter() throws Exception {
IndexService indexService = createIndex("test", Settings.EMPTY);
- IndexShard shard = indexService.getShard(0);
add(indexService, "cats", filter(termQuery("animal", "cat")));
add(indexService, "dogs", filter(termQuery("animal", "dog")));
diff --git a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java
index 9c5040589ae..3909354c989 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java
@@ -65,6 +65,31 @@ public class IndexSettingsTests extends ESTestCase {
assertEquals(42, integer.get());
}
+ public void testSettingsUpdateValidator() {
+ Version version = VersionUtils.getPreviousVersion();
+ Settings theSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version)
+ .put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build();
+ final AtomicInteger integer = new AtomicInteger(0);
+ Setting<Integer> integerSetting = Setting.intSetting("index.test.setting.int", -1,
+ Property.Dynamic, Property.IndexScope);
+ IndexMetaData metaData = newIndexMeta("index", theSettings);
+ IndexSettings settings = newIndexSettings(newIndexMeta("index", theSettings), Settings.EMPTY, integerSetting);
+ settings.getScopedSettings().addSettingsUpdateConsumer(integerSetting, integer::set,
+ (i) -> {if (i == 42) throw new AssertionError("boom");});
+
+ assertEquals(version, settings.getIndexVersionCreated());
+ assertEquals("0xdeadbeef", settings.getUUID());
+
+ assertFalse(settings.updateIndexMetaData(metaData));
+ assertEquals(metaData.getSettings().getAsMap(), settings.getSettings().getAsMap());
+ assertEquals(0, integer.get());
+ expectThrows(IllegalArgumentException.class, () -> settings.updateIndexMetaData(newIndexMeta("index",
+ Settings.builder().put(theSettings).put("index.test.setting.int", 42).build())));
+ assertTrue(settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(theSettings).put("index.test.setting.int", 41)
+ .build())));
+ assertEquals(41, integer.get());
+ }
+
public void testMergedSettingsArePassed() {
Version version = VersionUtils.getPreviousVersion();
Settings theSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version)
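The new testSettingsUpdateValidator exercises the consumer-plus-validator overload of addSettingsUpdateConsumer: the validator runs before the new value is accepted, so a rejected update leaves the old value untouched. The contract in miniature, with names invented for illustration:

    AtomicInteger current = new AtomicInteger(0);
    Setting<Integer> setting = Setting.intSetting("index.test.setting.int", -1, Property.Dynamic, Property.IndexScope);
    // Consumer applies accepted values; validator vetoes bad ones before they are applied.
    scopedSettings.addSettingsUpdateConsumer(setting, current::set,
        (i) -> { if (i == 42) throw new IllegalArgumentException("42 is not allowed"); });
    // Updating the index metadata to 42 now throws, and current keeps its previous value.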
diff --git a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java
index 596714b1c5b..bd78607c617 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java
@@ -94,7 +94,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
private Settings nodeSettings(String dataPath) {
return Settings.builder()
- .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), false)
+ .put(NodeEnvironment.ADD_NODE_LOCK_ID_TO_CUSTOM_PATH.getKey(), false)
.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), dataPath)
.put(FsDirectoryService.INDEX_LOCK_FACTOR_SETTING.getKey(), randomFrom("native", "simple"))
.build();
@@ -379,7 +379,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
assertThat(gResp2.getField("foo").getValue().toString(), equalTo("bar"));
}
- public void testPrimaryRelocationWithConcurrentIndexing() throws Throwable {
+ public void testPrimaryRelocationWithConcurrentIndexing() throws Exception {
Path dataPath = createTempDir();
Settings nodeSettings = nodeSettings(dataPath);
@@ -408,7 +408,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
final int numPhase2Docs = scaledRandomIntBetween(25, 200);
final CountDownLatch phase1finished = new CountDownLatch(1);
final CountDownLatch phase2finished = new CountDownLatch(1);
- final CopyOnWriteArrayList<Throwable> exceptions = new CopyOnWriteArrayList<>();
+ final CopyOnWriteArrayList<Exception> exceptions = new CopyOnWriteArrayList<>();
Thread thread = new Thread() {
@Override
public void run() {
@@ -418,8 +418,8 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
final IndexResponse indexResponse = client().prepareIndex(IDX, "doc",
Integer.toString(counter.incrementAndGet())).setSource("foo", "bar").get();
assertTrue(indexResponse.isCreated());
- } catch (Throwable t) {
- exceptions.add(t);
+ } catch (Exception e) {
+ exceptions.add(e);
}
final int docCount = counter.get();
if (docCount == numPhase1Docs) {
@@ -454,7 +454,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase {
public void testPrimaryRelocationWhereRecoveryFails() throws Exception {
Path dataPath = createTempDir();
Settings nodeSettings = Settings.builder()
- .put("node.add_id_to_custom_path", false)
+ .put("node.add_lock_id_to_custom_path", false)
.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), dataPath)
.build();
diff --git a/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java b/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java
index 33089755a7a..e1d8a878c14 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java
@@ -176,28 +176,28 @@ public class IndexingSlowLogTests extends ESTestCase {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING.getKey(), "NOT A TIME VALUE").build()));
fail();
} catch (IllegalArgumentException ex) {
- assertEquals(ex.getMessage(), "Failed to parse setting [index.indexing.slowlog.threshold.index.trace] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
+ assertEquals(ex.getMessage(), "failed to parse setting [index.indexing.slowlog.threshold.index.trace] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
}
try {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING.getKey(), "NOT A TIME VALUE").build()));
fail();
} catch (IllegalArgumentException ex) {
- assertEquals(ex.getMessage(), "Failed to parse setting [index.indexing.slowlog.threshold.index.debug] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
+ assertEquals(ex.getMessage(), "failed to parse setting [index.indexing.slowlog.threshold.index.debug] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
}
try {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING.getKey(), "NOT A TIME VALUE").build()));
fail();
} catch (IllegalArgumentException ex) {
- assertEquals(ex.getMessage(), "Failed to parse setting [index.indexing.slowlog.threshold.index.info] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
+ assertEquals(ex.getMessage(), "failed to parse setting [index.indexing.slowlog.threshold.index.info] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
}
try {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING.getKey(), "NOT A TIME VALUE").build()));
fail();
} catch (IllegalArgumentException ex) {
- assertEquals(ex.getMessage(), "Failed to parse setting [index.indexing.slowlog.threshold.index.warn] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
+ assertEquals(ex.getMessage(), "failed to parse setting [index.indexing.slowlog.threshold.index.warn] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
}
}
diff --git a/core/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java b/core/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java
index 04291d957a8..d2bffb0f749 100644
--- a/core/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java
+++ b/core/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java
@@ -249,28 +249,28 @@ public class SearchSlowLogTests extends ESSingleNodeTestCase {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING.getKey(), "NOT A TIME VALUE").build()));
fail();
} catch (IllegalArgumentException ex) {
- assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.query.trace] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
+ assertEquals(ex.getMessage(), "failed to parse setting [index.search.slowlog.threshold.query.trace] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
}
try {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING.getKey(), "NOT A TIME VALUE").build()));
fail();
} catch (IllegalArgumentException ex) {
- assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.query.debug] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
+ assertEquals(ex.getMessage(), "failed to parse setting [index.search.slowlog.threshold.query.debug] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
}
try {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING.getKey(), "NOT A TIME VALUE").build()));
fail();
} catch (IllegalArgumentException ex) {
- assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.query.info] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
+ assertEquals(ex.getMessage(), "failed to parse setting [index.search.slowlog.threshold.query.info] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
}
try {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING.getKey(), "NOT A TIME VALUE").build()));
fail();
} catch (IllegalArgumentException ex) {
- assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.query.warn] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
+ assertEquals(ex.getMessage(), "failed to parse setting [index.search.slowlog.threshold.query.warn] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
}
}
@@ -320,28 +320,28 @@ public class SearchSlowLogTests extends ESSingleNodeTestCase {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING.getKey(), "NOT A TIME VALUE").build()));
fail();
} catch (IllegalArgumentException ex) {
- assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.fetch.trace] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
+ assertEquals(ex.getMessage(), "failed to parse setting [index.search.slowlog.threshold.fetch.trace] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
}
try {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING.getKey(), "NOT A TIME VALUE").build()));
fail();
} catch (IllegalArgumentException ex) {
- assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.fetch.debug] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
+ assertEquals(ex.getMessage(), "failed to parse setting [index.search.slowlog.threshold.fetch.debug] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
}
try {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING.getKey(), "NOT A TIME VALUE").build()));
fail();
} catch (IllegalArgumentException ex) {
- assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.fetch.info] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
+ assertEquals(ex.getMessage(), "failed to parse setting [index.search.slowlog.threshold.fetch.info] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
}
try {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING.getKey(), "NOT A TIME VALUE").build()));
fail();
} catch (IllegalArgumentException ex) {
- assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.fetch.warn] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
+ assertEquals(ex.getMessage(), "failed to parse setting [index.search.slowlog.threshold.fetch.warn] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");
}
}
diff --git a/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java b/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java
index 000d2509ea8..8e2b8f68963 100644
--- a/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java
+++ b/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java
@@ -45,8 +45,6 @@ public class SettingsListenerIT extends ESIntegTestCase {
public static class SettingsListenerPlugin extends Plugin {
private final SettingsTestingService service = new SettingsTestingService();
- private static final Setting<Integer> SETTING = Setting.intSetting("index.test.new.setting", 0,
- Property.Dynamic, Property.IndexScope);
@Override
public List<Setting<?>> getSettings() {
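The slow-log assertions above still use the try/fail/catch idiom, while other files in this same patch (AnalysisServiceTests, IndexModuleTests) collapse the identical check into expectThrows. For comparison, one of the blocks above could be written as:

    IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () ->
        settings.updateIndexMetaData(newIndexMeta("index", Settings.builder()
            .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING.getKey(), "NOT A TIME VALUE").build())));
    assertEquals(ex.getMessage(), "failed to parse setting [index.search.slowlog.threshold.fetch.warn] "
        + "with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized");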
analyzerProvider("default")), - Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); + singletonMap("default", analyzerProvider("default")), emptyMap(), emptyMap(), emptyMap()); assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); } - public void testOverrideDefaultIndexAnalyzer() { + public void testOverrideDefaultIndexAnalyzerIsUnsupported() { Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0_alpha1, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - try { - AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings), - Collections.singletonMap("default_index", new PreBuiltAnalyzerProvider("default_index", AnalyzerScope.INDEX, new EnglishAnalyzer())), - Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); - fail("Expected ISE"); - } catch (IllegalArgumentException e) { - // expected - assertTrue(e.getMessage().contains("[index.analysis.analyzer.default_index] is not supported")); - } + AnalyzerProvider defaultIndex = new PreBuiltAnalyzerProvider("default_index", AnalyzerScope.INDEX, new EnglishAnalyzer()); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings), + singletonMap("default_index", defaultIndex), emptyMap(), emptyMap(), emptyMap())); + assertTrue(e.getMessage().contains("[index.analysis.analyzer.default_index] is not supported")); } public void testBackCompatOverrideDefaultIndexAnalyzer() { - Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1)); + Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), + VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1)); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings), - Collections.singletonMap("default_index", analyzerProvider("default_index")), - Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); + singletonMap("default_index", analyzerProvider("default_index")), emptyMap(), emptyMap(), emptyMap()); assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); @@ -104,17 +105,17 @@ public class AnalysisServiceTests extends ESTestCase { Version version = VersionUtils.randomVersion(random()); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings), - Collections.singletonMap("default_search", analyzerProvider("default_search")), - Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); + singletonMap("default_search", analyzerProvider("default_search")), emptyMap(), emptyMap(), 
emptyMap()); assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); assertThat(analysisService.defaultSearchAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); assertThat(analysisService.defaultSearchQuoteAnalyzer().analyzer(), instanceOf(EnglishAnalyzer.class)); } public void testBackCompatOverrideDefaultIndexAndSearchAnalyzer() { - Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1)); + Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), + VersionUtils.getPreviousVersion(Version.V_5_0_0_alpha1)); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - Map analyzers = new HashMap<>(); + Map> analyzers = new HashMap<>(); analyzers.put("default_index", analyzerProvider("default_index")); analyzers.put("default_search", analyzerProvider("default_search")); AnalysisService analysisService = new AnalysisService(IndexSettingsModule.newIndexSettings("index", settings), @@ -125,7 +126,6 @@ public class AnalysisServiceTests extends ESTestCase { } public void testConfigureCamelCaseTokenFilter() throws IOException { - // tests a filter that Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); Settings indexSettings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) @@ -137,7 +137,9 @@ public class AnalysisServiceTests extends ESTestCase { .putArray("index.analysis.analyzer.custom_analyzer_1.filter", "lowercase", "word_delimiter").build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); + + AnalysisService analysisService = new AnalysisModule(new Environment(settings), emptyList()).getAnalysisRegistry() + .build(idxSettings); try (NamedAnalyzer custom_analyser = analysisService.analyzer("custom_analyzer")) { assertNotNull(custom_analyser); TokenStream tokenStream = custom_analyser.tokenStream("foo", "J2SE j2ee"); @@ -176,8 +178,10 @@ public class AnalysisServiceTests extends ESTestCase { Settings indexSettings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); - AnalysisService otherAnalysisSergice = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); + AnalysisService analysisService = new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap()) + .build(idxSettings); + AnalysisService otherAnalysisSergice = new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(), + emptyMap()).build(idxSettings); final int numIters = randomIntBetween(5, 20); for (int i = 0; i < numIters; i++) { PreBuiltAnalyzers preBuiltAnalyzers = RandomPicks.randomFrom(random(), PreBuiltAnalyzers.values()); @@ -196,7 +200,8 @@ public class AnalysisServiceTests extends ESTestCase { .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new AnalysisRegistry(null, new Environment(settings)).build(idxSettings)); + 
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap()).build(idxSettings)); assertThat(e.getMessage(), equalTo("analyzer [test_analyzer] must specify either an analyzer type, or a tokenizer")); } } diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java index 3e33123f932..40ec2b412ff 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java @@ -23,14 +23,14 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; -import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.indices.analysis.HunspellService; +import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.test.IndexSettingsModule; import java.io.IOException; import java.nio.file.Path; -import java.util.Collections; + +import static java.util.Collections.emptyList; public class AnalysisTestsHelper { @@ -49,7 +49,6 @@ public class AnalysisTestsHelper { settings = Settings.builder().put(settings).put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); } IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); - Environment environment = new Environment(settings); - return new AnalysisRegistry(new HunspellService(settings, environment, Collections.emptyMap()), environment).build(idxSettings); + return new AnalysisModule(new Environment(settings), emptyList()).getAnalysisRegistry().build(idxSettings); } } diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java index 2b2c9288f17..3f2b1461ef3 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java @@ -26,6 +26,8 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.test.ESTokenStreamTestCase; import org.elasticsearch.test.IndexSettingsModule; +import static org.elasticsearch.test.ESTestCase.createAnalysisService; + /** */ public class CharFilterTests extends ESTokenStreamTestCase { @@ -39,7 +41,7 @@ public class CharFilterTests extends ESTokenStreamTestCase { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); + AnalysisService analysisService = createAnalysisService(idxSettings, settings); NamedAnalyzer analyzer1 = analysisService.analyzer("custom_with_char_filter"); assertTokenStreamContents(analyzer1.tokenStream("test", "jeff quit phish"), new String[]{"jeff", "qit", "fish"}); @@ -56,7 +58,7 @@ public class CharFilterTests extends ESTokenStreamTestCase { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); + AnalysisService 
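AnalysisTestsHelper above shows the new canonical way to get an AnalysisService in tests: build the registry through AnalysisModule rather than wiring AnalysisRegistry by hand. Reduced to its core, a sketch:

    // No plugins registered, so only built-in analysis components are available.
    AnalysisService analysisService = new AnalysisModule(new Environment(settings), emptyList())
        .getAnalysisRegistry()
        .build(idxSettings);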
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java
index 2b2c9288f17..3f2b1461ef3 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java
@@ -26,6 +26,8 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.test.ESTokenStreamTestCase;
import org.elasticsearch.test.IndexSettingsModule;
+import static org.elasticsearch.test.ESTestCase.createAnalysisService;
+
/**
*/
public class CharFilterTests extends ESTokenStreamTestCase {
@@ -39,7 +41,7 @@ public class CharFilterTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
- AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
+ AnalysisService analysisService = createAnalysisService(idxSettings, settings);
NamedAnalyzer analyzer1 = analysisService.analyzer("custom_with_char_filter");
assertTokenStreamContents(analyzer1.tokenStream("test", "jeff quit phish"), new String[]{"jeff", "qit", "fish"});
@@ -56,7 +58,7 @@ public class CharFilterTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
- AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
+ AnalysisService analysisService = createAnalysisService(idxSettings, settings);
NamedAnalyzer analyzer1 = analysisService.analyzer("custom_with_char_filter");
@@ -78,7 +80,7 @@ public class CharFilterTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
- AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
+ AnalysisService analysisService = createAnalysisService(idxSettings, settings);
NamedAnalyzer analyzer1 = analysisService.analyzer("custom_with_char_filter");
assertTokenStreamContents(analyzer1.tokenStream("test", "faBBbBB aBbbbBf"), new String[]{"foo", "oof"});
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java
index 0e3af58dc90..0c9010b2c9b 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java
@@ -31,15 +31,20 @@ import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.compound.DictionaryCompoundWordTokenFilterFactory;
import org.elasticsearch.index.analysis.filter1.MyFilterTokenFilterFactory;
+import org.elasticsearch.indices.analysis.AnalysisModule;
+import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
+import org.elasticsearch.plugins.AnalysisPlugin;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.hamcrest.MatcherAssert;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Collections;
import java.util.List;
+import java.util.Map;
+import static java.util.Collections.singletonList;
+import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.instanceOf;
@@ -50,8 +55,13 @@ public class CompoundAnalysisTests extends ESTestCase {
public void testDefaultsCompoundAnalysis() throws Exception {
Settings settings = getJsonSettings();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
- AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings),
- Collections.emptyMap(),Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new),Collections.emptyMap(),Collections.emptyMap()).build(idxSettings);
+ AnalysisModule analysisModule = new AnalysisModule(new Environment(settings), singletonList(new AnalysisPlugin() {
+ @Override
+ public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
+ return singletonMap("myfilter", MyFilterTokenFilterFactory::new);
+ }
+ }));
+ AnalysisService analysisService = analysisModule.getAnalysisRegistry().build(idxSettings);
TokenFilterFactory filterFactory = analysisService.tokenFilter("dict_dec");
MatcherAssert.assertThat(filterFactory, instanceOf(DictionaryCompoundWordTokenFilterFactory.class));
@@ -62,14 +72,20 @@ public class CompoundAnalysisTests extends ESTestCase {
for (Settings settings : settingsArr) {
List<String> terms = analyze(settings, "decompoundingAnalyzer", "donaudampfschiff spargelcremesuppe");
MatcherAssert.assertThat(terms.size(), equalTo(8));
- MatcherAssert.assertThat(terms, hasItems("donau", "dampf", "schiff", "donaudampfschiff", "spargel", "creme", "suppe", "spargelcremesuppe"));
+ MatcherAssert.assertThat(terms,
+ hasItems("donau", "dampf", "schiff", "donaudampfschiff", "spargel", "creme", "suppe", "spargelcremesuppe"));
}
}
private List<String> analyze(Settings settings, String analyzerName, String text) throws IOException {
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
- AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings),
- Collections.emptyMap(), Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new),Collections.emptyMap(),Collections.emptyMap()).build(idxSettings);
+ AnalysisModule analysisModule = new AnalysisModule(new Environment(settings), singletonList(new AnalysisPlugin() {
+ @Override
+ public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
+ return singletonMap("myfilter", MyFilterTokenFilterFactory::new);
+ }
+ }));
+ AnalysisService analysisService = analysisModule.getAnalysisRegistry().build(idxSettings);
Analyzer analyzer = analysisService.analyzer(analyzerName).analyzer();
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java
index 3f5ad6fed4f..caefb1039c2 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java
@@ -27,6 +27,7 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.test.ESTokenStreamTestCase;
import org.elasticsearch.test.IndexSettingsModule;
+import static org.elasticsearch.test.ESTestCase.createAnalysisService;
import static org.hamcrest.Matchers.containsString;
public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase {
@@ -39,7 +40,7 @@ public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase {
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
- AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
+ AnalysisService analysisService = createAnalysisService(idxSettings, settings);
NamedAnalyzer analyzer1 = analysisService.analyzer("single");
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java
index 39641c281aa..88c5fe692d6 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java
@@ -27,6 +27,8 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.test.ESTokenStreamTestCase;
import org.elasticsearch.test.IndexSettingsModule;
+import static org.elasticsearch.test.ESTestCase.createAnalysisService;
+
public class StopAnalyzerTests extends ESTokenStreamTestCase {
public void testDefaultsCompoundAnalysis() throws Exception {
String json = "/org/elasticsearch/index/analysis/stop.json";
@@ -36,7 +38,7 @@ public class StopAnalyzerTests extends ESTokenStreamTestCase {
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
- AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
+ AnalysisService analysisService = createAnalysisService(idxSettings, settings);
NamedAnalyzer analyzer1 = analysisService.analyzer("analyzer1");
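The anonymous AnalysisPlugin in CompoundAnalysisTests is the general extension point for custom token filters. As a named plugin class, the same registration would plausibly read as follows (a sketch reusing the myfilter factory from the hunks above; the extends-Plugin pairing is assumed from the plugin conventions visible in this patch):

    public class MyAnalysisPlugin extends Plugin implements AnalysisPlugin {
        @Override
        public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
            // expose "myfilter" to the analysis registry
            return singletonMap("myfilter", MyFilterTokenFilterFactory::new);
        }
    }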
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java
index 104bd17fb33..9e4d5b27ad7 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java
@@ -26,13 +26,10 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
-import org.elasticsearch.common.lucene.all.AllEntries;
import org.elasticsearch.common.lucene.all.AllTokenStream;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
-import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
@@ -67,8 +64,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
- analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings);
-
+ analysisService = createAnalysisService(idxSettings, settings);
match("synonymAnalyzer", "kimchy is the dude abides", "shay is the elasticsearch man!");
match("synonymAnalyzer_file", "kimchy is the dude abides", "shay is the elasticsearch man!");
diff --git a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java
index fdf1411fdf8..80f1cbe46d0 100644
--- a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java
+++ b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java
@@ -36,9 +36,7 @@ import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
-import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.similarity.SimilarityService;
@@ -97,7 +95,7 @@ public class CodecTests extends ESTestCase {
.build();
IndexSettings settings = IndexSettingsModule.newIndexSettings("_na", nodeSettings);
SimilarityService similarityService = new SimilarityService(settings, Collections.emptyMap());
- AnalysisService analysisService = new AnalysisRegistry(null, new Environment(nodeSettings)).build(settings);
+ AnalysisService analysisService = createAnalysisService(settings, nodeSettings);
MapperRegistry mapperRegistry = new MapperRegistry(Collections.emptyMap(), Collections.emptyMap());
MapperService service = new MapperService(settings, analysisService, similarityService, mapperRegistry, () -> null);
return new CodecService(service, ESLoggerFactory.getLogger("test"));
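Several of the preceding files now call a createAnalysisService helper that, judging by the static imports, lives on ESTestCase; this patch never shows its body, but its call sites suggest it wraps the AnalysisModule construction shown earlier. Presumably something close to:

    // Assumed shape of the helper, inferred from its call sites in this patch; not verbatim source.
    public static AnalysisService createAnalysisService(IndexSettings indexSettings, Settings nodeSettings) throws IOException {
        return new AnalysisModule(new Environment(nodeSettings), emptyList()).getAnalysisRegistry().build(indexSettings);
    }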
Collections.emptyList()).getMapperRegistry(); MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService, mapperRegistry, () -> null); DocumentMapper.Builder b = new DocumentMapper.Builder(rootBuilder, mapperService); this.docMapper = b.build(mapperService); @@ -2214,7 +2214,7 @@ public class InternalEngineTests extends ESTestCase { } public void testShardNotAvailableExceptionWhenEngineClosedConcurrently() throws IOException, InterruptedException { - AtomicReference throwable = new AtomicReference<>(); + AtomicReference exception = new AtomicReference<>(); String operation = randomFrom("optimize", "refresh", "flush"); Thread mergeThread = new Thread() { @Override @@ -2237,8 +2237,8 @@ public class InternalEngineTests extends ESTestCase { break; } } - } catch (Throwable t) { - throwable.set(t); + } catch (Exception e) { + exception.set(e); stop = true; } } @@ -2247,8 +2247,8 @@ public class InternalEngineTests extends ESTestCase { mergeThread.start(); engine.close(); mergeThread.join(); - logger.info("exception caught: ", throwable.get()); - assertTrue("expected an Exception that signals shard is not available", TransportActions.isShardNotAvailableException(throwable.get())); + logger.info("exception caught: ", exception.get()); + assertTrue("expected an Exception that signals shard is not available", TransportActions.isShardNotAvailableException(exception.get())); } public void testCurrentTranslogIDisCommitted() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java index a739361f501..33e6914e260 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java @@ -244,7 +244,7 @@ public class ShadowEngineTests extends ESTestCase { } Engine.EventListener eventListener = new Engine.EventListener() { @Override - public void onFailedEngine(String reason, @Nullable Throwable t) { + public void onFailedEngine(String reason, @Nullable Exception e) { // we don't need to notify anybody in this test } }; @@ -502,7 +502,7 @@ public class ShadowEngineTests extends ESTestCase { public void testShadowEngineIgnoresWriteOperations() throws Exception { // create a document ParseContext.Document document = testDocumentWithTextField(); - document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); try { replicaEngine.index(new Engine.Index(newUid("1"), doc)); @@ -540,7 +540,7 @@ public class ShadowEngineTests extends ESTestCase { // Now, add a document to the primary so we can test shadow engine deletes document = testDocumentWithTextField(); - document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); primaryEngine.flush(); @@ -595,7 +595,7 @@ public class ShadowEngineTests extends ESTestCase { // create a document ParseContext.Document document = testDocumentWithTextField(); - document.add(new 
Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); @@ -614,7 +614,7 @@ public class ShadowEngineTests extends ESTestCase { // but, we can still get it (in realtime) Engine.GetResult getResult = primaryEngine.get(new Engine.Get(true, newUid("1"))); assertThat(getResult.exists(), equalTo(true)); - assertThat(getResult.source().source.toBytesArray(), equalTo(B_1.toBytesArray())); + assertThat(getResult.source().source, equalTo(B_1)); assertThat(getResult.docIdAndVersion(), nullValue()); getResult.release(); @@ -651,7 +651,7 @@ public class ShadowEngineTests extends ESTestCase { // now do an update document = testDocument(); document.add(new TextField("value", "test1", Field.Store.YES)); - document.add(new Field(SourceFieldMapper.NAME, B_2.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_2), SourceFieldMapper.Defaults.FIELD_TYPE)); doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); @@ -665,7 +665,7 @@ public class ShadowEngineTests extends ESTestCase { // but, we can still get it (in realtime) getResult = primaryEngine.get(new Engine.Get(true, newUid("1"))); assertThat(getResult.exists(), equalTo(true)); - assertThat(getResult.source().source.toBytesArray(), equalTo(B_2.toBytesArray())); + assertThat(getResult.source().source, equalTo(B_2)); assertThat(getResult.docIdAndVersion(), nullValue()); getResult.release(); @@ -722,7 +722,7 @@ public class ShadowEngineTests extends ESTestCase { // add it back document = testDocumentWithTextField(); - document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); @@ -973,7 +973,7 @@ public class ShadowEngineTests extends ESTestCase { // create a document ParseContext.Document document = testDocumentWithTextField(); - document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); pEngine.index(new Engine.Index(newUid("1"), doc)); pEngine.flush(true, true); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java index f9fb5e77b70..9a8815e9398 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.mapper; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; @@ -116,7 +115,7 @@ public class DynamicMappingDisabledTests extends 
ESSingleNodeTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { onFailureCalled.set(true); assertThat(e, instanceOf(IndexNotFoundException.class)); assertEquals(e.getMessage(), "no such index"); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingIntegrationIT.java index 8afdea27451..71628c06128 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingIntegrationIT.java @@ -96,8 +96,8 @@ public class DynamicMappingIntegrationIT extends ESIntegTestCase { try { startLatch.await(); assertTrue(client().prepareIndex("index", "type", id).setSource("field" + id, "bar").get().isCreated()); - } catch (Throwable t) { - error.compareAndSet(null, t); + } catch (Exception e) { + error.compareAndSet(null, e); } } }); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java index 68e59527982..37d0436c9db 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java @@ -32,7 +32,7 @@ import java.util.List; public abstract class FieldTypeTestCase extends ESTestCase { /** Abstraction for mutating a property of a MappedFieldType */ - public static abstract class Modifier { + public abstract static class Modifier { /** The name of the property that is being modified. Used in test failure messages. */ public final String property; /** true if this modifier only makes types incompatible in strict mode, false otherwise */ diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index c93c181f860..2afeb02499d 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -103,7 +103,7 @@ public class MapperServiceTests extends ESSingleNodeTestCase { // 2. 
already existing index IndexService indexService = createIndex("index2"); - expectThrows(ExecutionException.class, () -> { + e = expectThrows(ExecutionException.class, () -> { client().prepareIndex("index1", MapperService.DEFAULT_MAPPING, "2").setSource().execute().get(); }); throwable = ExceptionsHelper.unwrapCause(e.getCause()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java index 165b49d3145..817dc6e50df 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java @@ -24,6 +24,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -292,17 +293,17 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { } DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - String mapping = mappingBuilder.endObject().endObject().bytes().toUtf8(); + String mapping = mappingBuilder.endObject().endObject().bytes().utf8ToString(); logger.info("Mapping: {}", mapping); DocumentMapper docMapper = parser.parse("test", new CompressedXContent(mapping)); String builtMapping = docMapper.mappingSource().string(); // reparse it DocumentMapper builtDocMapper = parser.parse("test", new CompressedXContent(builtMapping)); - byte[] json = jsonBuilder().startObject() + byte[] json = BytesReference.toBytes(jsonBuilder().startObject() .field("foo", "bar") .field("foobar", "foobar") - .endObject().bytes().toBytes(); + .endObject().bytes()); Document doc = builtDocMapper.parse("test", "test", "1", new BytesArray(json)).rootDoc(); IndexableField[] fields = doc.getFields("_all"); if (enabled) { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java index 7be0cc8031b..4bf1d0c68f7 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.binary; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -76,10 +77,10 @@ public class BinaryMappingTests extends ESSingleNodeTestCase { // case 2: a value that looks compressed: this used to fail in 1.x BytesStreamOutput out = new BytesStreamOutput(); - try (StreamOutput compressed = CompressorFactory.defaultCompressor().streamOutput(out)) { + try (StreamOutput compressed = CompressorFactory.COMPRESSOR.streamOutput(out)) { new BytesArray(binaryValue1).writeTo(compressed); } - final byte[] binaryValue2 = out.bytes().toBytes(); + final byte[] binaryValue2 = BytesReference.toBytes(out.bytes()); assertTrue(CompressorFactory.isCompressed(new BytesArray(binaryValue2))); for (byte[] value : 
Arrays.asList(binaryValue1, binaryValue2)) { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapperPlugin.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapperPlugin.java index 115faf2c6a1..81b7375ab2e 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapperPlugin.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapperPlugin.java @@ -19,21 +19,35 @@ package org.elasticsearch.index.mapper.externalvalues; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.indices.IndicesModule; +import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; -public class ExternalMapperPlugin extends Plugin { +public class ExternalMapperPlugin extends Plugin implements MapperPlugin { public static final String EXTERNAL = "external"; public static final String EXTERNAL_BIS = "external_bis"; public static final String EXTERNAL_UPPER = "external_upper"; - public void onModule(IndicesModule indicesModule) { - indicesModule.registerMetadataMapper(ExternalMetadataMapper.CONTENT_TYPE, new ExternalMetadataMapper.TypeParser()); - indicesModule.registerMapper(EXTERNAL, new ExternalMapper.TypeParser(EXTERNAL, "foo")); - indicesModule.registerMapper(EXTERNAL_BIS, new ExternalMapper.TypeParser(EXTERNAL_BIS, "bar")); - indicesModule.registerMapper(EXTERNAL_UPPER, new ExternalMapper.TypeParser(EXTERNAL_UPPER, "FOO BAR")); - indicesModule.registerMapper(FakeStringFieldMapper.CONTENT_TYPE, new FakeStringFieldMapper.TypeParser()); + @Override + public Map<String, Mapper.TypeParser> getMappers() { + Map<String, Mapper.TypeParser> mappers = new HashMap<>(); + mappers.put(EXTERNAL, new ExternalMapper.TypeParser(EXTERNAL, "foo")); + mappers.put(EXTERNAL_BIS, new ExternalMapper.TypeParser(EXTERNAL_BIS, "bar")); + mappers.put(EXTERNAL_UPPER, new ExternalMapper.TypeParser(EXTERNAL_UPPER, "FOO BAR")); + mappers.put(FakeStringFieldMapper.CONTENT_TYPE, new FakeStringFieldMapper.TypeParser()); + return Collections.unmodifiableMap(mappers); + } + + @Override + public Map<String, MetadataFieldMapper.TypeParser> getMetadataMappers() { + return Collections.singletonMap(ExternalMetadataMapper.CONTENT_TYPE, new ExternalMetadataMapper.TypeParser()); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java index faa54b72a69..1ac097cb7b1 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MetadataFieldMapper; @@ -39,10 +40,12 @@ import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.TermBasedFieldType; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.mapper.MapperRegistry; +import org.elasticsearch.plugins.MapperPlugin; import
org.elasticsearch.test.ESSingleNodeTestCase; import java.io.IOException; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.SortedSet; @@ -235,8 +238,10 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { public void testSeesFieldsFromPlugins() throws IOException { IndexService indexService = createIndex("test"); - IndicesModule indicesModule = new IndicesModule(new NamedWriteableRegistry()); - indicesModule.registerMetadataMapper("_dummy", new DummyMetadataFieldMapper.TypeParser()); + IndicesModule indicesModule = newTestIndicesModule( + Collections.emptyMap(), + Collections.singletonMap("_dummy", new DummyMetadataFieldMapper.TypeParser()) + ); final MapperRegistry mapperRegistry = indicesModule.getMapperRegistry(); MapperService mapperService = new MapperService(indexService.getIndexSettings(), indexService.analysisService(), indexService.similarityService(), mapperRegistry, indexService::newQueryShardContext); DocumentMapperParser parser = new DocumentMapperParser(indexService.getIndexSettings(), mapperService, diff --git a/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java index 627f268545a..0133d3e5943 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java @@ -159,7 +159,7 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { final AtomicBoolean stopped = new AtomicBoolean(false); final CyclicBarrier barrier = new CyclicBarrier(2); final AtomicReference<String> lastIntroducedFieldName = new AtomicReference<>(); - final AtomicReference<Throwable> error = new AtomicReference<>(); + final AtomicReference<Exception> error = new AtomicReference<>(); final Thread updater = new Thread() { @Override public void run() { @@ -173,8 +173,8 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { lastIntroducedFieldName.set(fieldName); mapperService.merge("test", new CompressedXContent(update.toString()), MapperService.MergeReason.MAPPING_UPDATE, false); } - } catch (Throwable t) { - error.set(t); + } catch (Exception e) { - error.set(e); } finally { stopped.set(true); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java index 78da5abb746..8f38e2be576 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java @@ -297,7 +297,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { out.close(); BytesReference bytes = out.bytes(); - MappingMetaData metaData = MappingMetaData.PROTO.readFrom(StreamInput.wrap(bytes)); + MappingMetaData metaData = MappingMetaData.PROTO.readFrom(bytes.streamInput()); assertThat(metaData, is(expected)); } @@ -314,7 +314,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { out.close(); BytesReference bytes = out.bytes(); - MappingMetaData metaData = MappingMetaData.PROTO.readFrom(StreamInput.wrap(bytes)); + MappingMetaData metaData = MappingMetaData.PROTO.readFrom(bytes.streamInput()); assertThat(metaData, is(expected)); } @@ -331,7 +331,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { out.close(); BytesReference bytes = out.bytes();
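/*
 * Hunks in several of these test files converge on the same serialization idiom: the
 * removed static StreamInput.wrap(BytesReference) helper is replaced by asking the
 * BytesReference itself for a stream. A minimal, self-contained sketch of the new
 * round-trip, assuming only the core stream classes touched by this patch (the class
 * and method names below are illustrative, not part of the change):
 */
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;

class StreamInputRoundTripSketch {
    static int roundTrip(int value) throws java.io.IOException {
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            out.writeVInt(value);
            // was: StreamInput.wrap(out.bytes())
            try (StreamInput in = out.bytes().streamInput()) {
                return in.readVInt();
            }
        }
    }
}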
- MappingMetaData metaData = MappingMetaData.PROTO.readFrom(StreamInput.wrap(bytes)); + MappingMetaData metaData = MappingMetaData.PROTO.readFrom(bytes.streamInput()); assertThat(metaData, is(expected)); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java index ad9e03327b2..620968ddbe8 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java @@ -139,7 +139,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase { String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_ttl") - .field("default", "1w") + .field("default", "7d") .endObject() .startObject("properties").field("field").startObject().field("type", "text").endObject().endObject() .endObject().endObject().string(); diff --git a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java index 89da227df87..3839254bf1d 100644 --- a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -370,6 +371,28 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase<BoolQueryBuilder> { + /** + * test that unknown query names in the clauses throw an error + */ + public void testUnknownQueryName() throws IOException { + String query = "{\"bool\" : {\"must\" : { \"unknown_query\" : { } } } }"; + ParsingException ex = expectThrows(ParsingException.class, () -> parseQuery(query, ParseFieldMatcher.EMPTY)); + assertEquals("no [query] registered for [unknown_query]", ex.getMessage()); + } + + /** + * test that two queries in object throws error + */ + public void testTooManyQueriesInObject() throws IOException { + String clauseType = randomFrom(new String[] {"must", "should", "must_not", "filter"}); + // should also throw error if invalid query is preceded by a valid one + String query = "{\"bool\" : {\"" + clauseType + + "\" : { \"match\" : { \"foo\" : \"bar\" } , \"match\" : { \"baz\" : \"buzz\" } } } }"; + ParsingException ex = expectThrows(ParsingException.class, () -> parseQuery(query, ParseFieldMatcher.EMPTY)); + assertEquals("expected [END_OBJECT] but got [FIELD_NAME], possibly too many query clauses", ex.getMessage()); + } + public void testRewrite() throws IOException { BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); boolean mustRewrite = false; diff --git a/core/src/test/java/org/elasticsearch/index/query/CombineFunctionTests.java b/core/src/test/java/org/elasticsearch/index/query/CombineFunctionTests.java index 695330c21e2..667efbc8bac 100644 --- a/core/src/test/java/org/elasticsearch/index/query/CombineFunctionTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/CombineFunctionTests.java @@ -40,41 +40,41 @@ public class CombineFunctionTests extends ESTestCase { public void testWriteTo() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { CombineFunction.MULTIPLY.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(0)); } } try (BytesStreamOutput out = new
BytesStreamOutput()) { CombineFunction.REPLACE.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(1)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { CombineFunction.SUM.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(2)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { CombineFunction.AVG.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(3)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { CombineFunction.MIN.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(4)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { CombineFunction.MAX.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(5)); } } @@ -83,37 +83,37 @@ public class CombineFunctionTests extends ESTestCase { public void testReadFrom() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(0); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(CombineFunction.readFromStream(in), equalTo(CombineFunction.MULTIPLY)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(1); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(CombineFunction.readFromStream(in), equalTo(CombineFunction.REPLACE)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(2); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(CombineFunction.readFromStream(in), equalTo(CombineFunction.SUM)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(3); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(CombineFunction.readFromStream(in), equalTo(CombineFunction.AVG)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(4); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(CombineFunction.readFromStream(in), equalTo(CombineFunction.MIN)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(5); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(CombineFunction.readFromStream(in), equalTo(CombineFunction.MAX)); } } diff --git a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java index 841f35bd975..da8610f29f6 100644 --- a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java @@ -67,7 +67,7 @@ public class InnerHitBuilderTests extends ESTestCase { @BeforeClass public static void init() { namedWriteableRegistry = new NamedWriteableRegistry(); - indicesQueriesRegistry = new SearchModule(Settings.EMPTY, 
namedWriteableRegistry).getQueryParserRegistry(); + indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false).getQueryParserRegistry(); } @AfterClass @@ -381,7 +381,7 @@ public class InnerHitBuilderTests extends ESTestCase { private static InnerHitBuilder serializedCopy(InnerHitBuilder original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { original.writeTo(output); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { return new InnerHitBuilder(in); } } diff --git a/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java index 91a42d70809..3c5bfed86dd 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java @@ -33,7 +33,6 @@ import org.elasticsearch.action.termvectors.TermVectorsRequest; import org.elasticsearch.action.termvectors.TermVectorsResponse; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.lucene.search.MoreLikeThisQuery; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -208,7 +207,7 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase indexMapping = Collections.singletonMap("type", "{ \"type\": {} }"); - protected final static RecoveryTargetService.RecoveryListener recoveryListener = new RecoveryTargetService.RecoveryListener() { + private final Index index = new Index("test", "uuid"); + private final ShardId shardId = new ShardId(index, 0); + private final Map indexMapping = Collections.singletonMap("type", "{ \"type\": {} }"); + protected static final RecoveryTargetService.RecoveryListener recoveryListener = new RecoveryTargetService.RecoveryListener() { @Override public void onRecoveryDone(RecoveryState state) { @@ -199,7 +199,7 @@ public abstract class ESIndexLevelReplicationTestCase extends ESTestCase { } private DiscoveryNode getDiscoveryNode(String id) { - return new DiscoveryNode(id, id, DummyTransportAddress.INSTANCE, Collections.emptyMap(), + return new DiscoveryNode(id, id, LocalTransportAddress.buildUnique(), Collections.emptyMap(), Collections.singleton(DiscoveryNode.Role.DATA), Version.CURRENT); } @@ -410,7 +410,7 @@ public abstract class ESIndexLevelReplicationTestCase extends ESTestCase { } @Override - public void failShard(String message, Throwable throwable) { + public void failShard(String message, Exception exception) { throw new UnsupportedOperationException(); } @@ -455,14 +455,14 @@ public abstract class ESIndexLevelReplicationTestCase extends ESTestCase { new TransportReplicationAction.ReplicaResponse( replica.routingEntry().allocationId().getId(), replica.getLocalCheckpoint())); - } catch (Throwable t) { - listener.onFailure(t); + } catch (Exception e) { + listener.onFailure(e); } } @Override - public void failShard(ShardRouting replica, ShardRouting primary, String message, Throwable throwable, Runnable onSuccess, - Consumer onPrimaryDemoted, Consumer onIgnoredFailure) { + public void failShard(ShardRouting replica, ShardRouting 
primary, String message, Exception exception, Runnable onSuccess, + Consumer onPrimaryDemoted, Consumer onIgnoredFailure) { throw new UnsupportedOperationException(); } diff --git a/core/src/test/java/org/elasticsearch/index/seqno/LocalCheckpointServiceTests.java b/core/src/test/java/org/elasticsearch/index/seqno/LocalCheckpointServiceTests.java index 1456ed6aca2..d1f4c3987b2 100644 --- a/core/src/test/java/org/elasticsearch/index/seqno/LocalCheckpointServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/seqno/LocalCheckpointServiceTests.java @@ -117,8 +117,8 @@ public class LocalCheckpointServiceTests extends ESTestCase { final int threadId = t; threads[t] = new Thread(new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - throw new ElasticsearchException("failure in background thread", t); + public void onFailure(Exception e) { + throw new ElasticsearchException("failure in background thread", e); } @Override @@ -167,8 +167,8 @@ public class LocalCheckpointServiceTests extends ESTestCase { final int threadId = t; threads[t] = new Thread(new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - throw new ElasticsearchException("failure in background thread", t); + public void onFailure(Exception e) { + throw new ElasticsearchException("failure in background thread", e); } @Override diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardOperationsLockTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardOperationsLockTests.java new file mode 100644 index 00000000000..c9bb9e19866 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardOperationsLockTests.java @@ -0,0 +1,219 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.index.shard; + +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.inject.internal.Nullable; +import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +public class IndexShardOperationsLockTests extends ESTestCase { + + private static ThreadPool threadPool; + + private IndexShardOperationsLock block; + + @BeforeClass + public static void setupThreadPool() { + threadPool = new TestThreadPool("IndexShardOperationsLockTests"); + } + + @AfterClass + public static void shutdownThreadPool() { + ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); + threadPool = null; + } + + @Before + public void createIndexShardOperationsLock() { + block = new IndexShardOperationsLock(new ShardId("blubb", "id", 0), logger, threadPool); + } + + @After + public void checkNoInflightOperations() { + assertThat(block.semaphore.availablePermits(), equalTo(Integer.MAX_VALUE)); + assertThat(block.getActiveOperationsCount(), equalTo(0)); + } + + public void testAllOperationsInvoked() throws InterruptedException, TimeoutException, ExecutionException { + int numThreads = 10; + + List<PlainActionFuture<Releasable>> futures = new ArrayList<>(); + List<Thread> operationThreads = new ArrayList<>(); + CountDownLatch latch = new CountDownLatch(numThreads / 2); + for (int i = 0; i < numThreads; i++) { + PlainActionFuture<Releasable> future = new PlainActionFuture<Releasable>() { + @Override + public void onResponse(Releasable releasable) { + releasable.close(); + super.onResponse(releasable); + } + }; + Thread thread = new Thread() { + public void run() { + latch.countDown(); + block.acquire(future, ThreadPool.Names.GENERIC, true); + } + }; + futures.add(future); + operationThreads.add(thread); + } + + CountDownLatch blockFinished = new CountDownLatch(1); + threadPool.generic().execute(() -> { + try { + latch.await(); + blockAndWait().close(); + blockFinished.countDown(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); + + for (Thread thread : operationThreads) { + thread.start(); + } + + for (PlainActionFuture<Releasable> future : futures) { + assertNotNull(future.get(1, TimeUnit.MINUTES)); + } + + for (Thread thread : operationThreads) { + thread.join(); + } + + blockFinished.await(); + } + + + public void testOperationsInvokedImmediatelyIfNoBlock() throws ExecutionException, InterruptedException { + PlainActionFuture<Releasable> future = new PlainActionFuture<>(); + block.acquire(future, ThreadPool.Names.GENERIC, true); + assertTrue(future.isDone()); + future.get().close(); + } + + public void testOperationsIfClosed() throws ExecutionException, InterruptedException { + PlainActionFuture<Releasable> future = new PlainActionFuture<>(); + block.close(); + block.acquire(future, ThreadPool.Names.GENERIC, true); + ExecutionException exception = expectThrows(ExecutionException.class, future::get); + assertThat(exception.getCause(), instanceOf(IndexShardClosedException.class)); + }
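/*
 * The tests above all funnel through the same asynchronous calling convention:
 * IndexShardOperationsLock.acquire() takes an ActionListener<Releasable> plus the
 * name of the executor to complete it on, and the listener's Releasable returns the
 * permit when closed. A small sketch of a blocking wrapper over that API, done here
 * with PlainActionFuture just as the tests do (the helper name is illustrative):
 */
private Releasable acquireBlockingly(IndexShardOperationsLock lock) throws ExecutionException, InterruptedException {
    PlainActionFuture<Releasable> onAcquired = new PlainActionFuture<>();
    lock.acquire(onAcquired, ThreadPool.Names.GENERIC, true);
    return onAcquired.get(); // caller closes the Releasable to release the permit
}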
+ + public void testBlockIfClosed() throws ExecutionException, InterruptedException { + block.close(); + expectThrows(IndexShardClosedException.class, () -> block.blockOperations(randomInt(10), TimeUnit.MINUTES, + () -> { throw new IllegalArgumentException("fake error"); })); + } + + public void testOperationsDelayedIfBlock() throws ExecutionException, InterruptedException, TimeoutException { + PlainActionFuture<Releasable> future = new PlainActionFuture<>(); + try (Releasable releasable = blockAndWait()) { + block.acquire(future, ThreadPool.Names.GENERIC, true); + assertFalse(future.isDone()); + } + future.get(1, TimeUnit.MINUTES).close(); + } + + protected Releasable blockAndWait() throws InterruptedException { + CountDownLatch blockAcquired = new CountDownLatch(1); + CountDownLatch releaseBlock = new CountDownLatch(1); + CountDownLatch blockReleased = new CountDownLatch(1); + boolean throwsException = randomBoolean(); + IndexShardClosedException exception = new IndexShardClosedException(new ShardId("blubb", "id", 0)); + threadPool.generic().execute(() -> { + try { + block.blockOperations(1, TimeUnit.MINUTES, () -> { + try { + blockAcquired.countDown(); + releaseBlock.await(); + if (throwsException) { + throw exception; + } + } catch (InterruptedException e) { + throw new RuntimeException(); + } + }); + } catch (Exception e) { + if (e != exception) { + throw new RuntimeException(e); + } + } finally { + blockReleased.countDown(); + } + }); + blockAcquired.await(); + return () -> { + releaseBlock.countDown(); + try { + blockReleased.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }; + } + + public void testActiveOperationsCount() throws ExecutionException, InterruptedException { + PlainActionFuture<Releasable> future1 = new PlainActionFuture<>(); + block.acquire(future1, ThreadPool.Names.GENERIC, true); + assertTrue(future1.isDone()); + assertThat(block.getActiveOperationsCount(), equalTo(1)); + + PlainActionFuture<Releasable> future2 = new PlainActionFuture<>(); + block.acquire(future2, ThreadPool.Names.GENERIC, true); + assertTrue(future2.isDone()); + assertThat(block.getActiveOperationsCount(), equalTo(2)); + + future1.get().close(); + assertThat(block.getActiveOperationsCount(), equalTo(1)); + future1.get().close(); // check idempotence + assertThat(block.getActiveOperationsCount(), equalTo(1)); + future2.get().close(); + assertThat(block.getActiveOperationsCount(), equalTo(0)); + + try (Releasable releasable = blockAndWait()) { + assertThat(block.getActiveOperationsCount(), equalTo(0)); + } + + PlainActionFuture<Releasable> future3 = new PlainActionFuture<>(); + block.acquire(future3, ThreadPool.Names.GENERIC, true); + assertTrue(future3.isDone()); + assertThat(block.getActiveOperationsCount(), equalTo(1)); + future3.get().close(); + assertThat(block.getActiveOperationsCount(), equalTo(0)); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index ce37ce59aa6..79745cac27a 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -44,6 +44,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterInfoService; import
org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.InternalClusterInfoService; @@ -68,7 +69,7 @@ import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -105,6 +106,7 @@ import org.elasticsearch.test.FieldMaskingReader; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; import java.nio.file.Files; @@ -121,6 +123,7 @@ import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -295,13 +298,13 @@ public class IndexShardTests extends ESSingleNodeTestCase { client().admin().indices().prepareDelete("test").get(); assertThat(indexShard.getActiveOperationsCount(), equalTo(0)); try { - indexShard.acquirePrimaryOperationLock(); + indexShard.acquirePrimaryOperationLock(null, ThreadPool.Names.INDEX); fail("we should not be able to increment anymore"); } catch (IndexShardClosedException e) { // expected } try { - indexShard.acquireReplicaOperationLock(indexShard.getPrimaryTerm()); + indexShard.acquireReplicaOperationLock(indexShard.getPrimaryTerm(), null, ThreadPool.Names.INDEX); fail("we should not be able to increment anymore"); } catch (IndexShardClosedException e) { // expected @@ -339,21 +342,33 @@ public class IndexShardTests extends ESSingleNodeTestCase { assertEquals(0, indexShard.getActiveOperationsCount()); if (newPrimaryShardRouting.isRelocationTarget() == false) { try { - indexShard.acquireReplicaOperationLock(primaryTerm); + indexShard.acquireReplicaOperationLock(primaryTerm, null, ThreadPool.Names.INDEX); fail("shard shouldn't accept operations as replica"); } catch (IllegalStateException ignored) { } } - Releasable operation1 = indexShard.acquirePrimaryOperationLock(); + Releasable operation1 = acquirePrimaryOperationLockBlockingly(indexShard); assertEquals(1, indexShard.getActiveOperationsCount()); - Releasable operation2 = indexShard.acquirePrimaryOperationLock(); + Releasable operation2 = acquirePrimaryOperationLockBlockingly(indexShard); assertEquals(2, indexShard.getActiveOperationsCount()); Releasables.close(operation1, operation2); assertEquals(0, indexShard.getActiveOperationsCount()); } + private Releasable acquirePrimaryOperationLockBlockingly(IndexShard indexShard) throws ExecutionException, InterruptedException { + PlainActionFuture fut = new PlainActionFuture<>(); + indexShard.acquirePrimaryOperationLock(fut, ThreadPool.Names.INDEX); + return fut.get(); + } + + private Releasable acquireReplicaOperationLockBlockingly(IndexShard indexShard, long opPrimaryTerm) throws ExecutionException, InterruptedException { + PlainActionFuture fut = new PlainActionFuture<>(); + indexShard.acquireReplicaOperationLock(opPrimaryTerm, fut, 
ThreadPool.Names.INDEX); + return fut.get(); + } + public void testOperationLocksOnReplicaShards() throws InterruptedException, ExecutionException, IOException { assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0)).get()); ensureGreen("test"); @@ -399,20 +414,20 @@ public class IndexShardTests extends ESSingleNodeTestCase { assertEquals(0, indexShard.getActiveOperationsCount()); if (newShardRouting.primary() == false) { try { - indexShard.acquirePrimaryOperationLock(); + indexShard.acquirePrimaryOperationLock(null, ThreadPool.Names.INDEX); fail("shard shouldn't accept primary ops"); } catch (IllegalStateException ignored) { } } - Releasable operation1 = indexShard.acquireReplicaOperationLock(primaryTerm); + Releasable operation1 = acquireReplicaOperationLockBlockingly(indexShard, primaryTerm); assertEquals(1, indexShard.getActiveOperationsCount()); - Releasable operation2 = indexShard.acquireReplicaOperationLock(primaryTerm); + Releasable operation2 = acquireReplicaOperationLockBlockingly(indexShard, primaryTerm); assertEquals(2, indexShard.getActiveOperationsCount()); try { - indexShard.acquireReplicaOperationLock(primaryTerm - 1); + indexShard.acquireReplicaOperationLock(primaryTerm - 1, null, ThreadPool.Names.INDEX); fail("you can not increment the operation counter with an older primary term"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("operation term")); @@ -420,7 +435,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { } // but you can increment with a newer one.. - indexShard.acquireReplicaOperationLock(primaryTerm + 1 + randomInt(20)).close(); + acquireReplicaOperationLockBlockingly(indexShard, primaryTerm + 1 + randomInt(20)).close(); Releasables.close(operation1, operation2); assertEquals(0, indexShard.getActiveOperationsCount()); } @@ -448,7 +463,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { public static void write(ShardStateMetaData shardStateMetaData, Path... 
shardPaths) throws IOException { - ShardStateMetaData.FORMAT.write(shardStateMetaData, shardStateMetaData.legacyVersion, shardPaths); + ShardStateMetaData.FORMAT.write(shardStateMetaData, shardPaths); } public void testDurableFlagHasEffect() { @@ -658,7 +673,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { if (randomBoolean() || true) { // try to serialize it to ensure values survive the serialization BytesStreamOutput out = new BytesStreamOutput(); stats.writeTo(out); - StreamInput in = StreamInput.wrap(out.bytes()); + StreamInput in = out.bytes().streamInput(); stats = ShardStats.readShardStats(in); } XContentBuilder builder = XContentFactory.jsonBuilder(); @@ -716,7 +731,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { } @Override - public void postIndex(Engine.Index index, Throwable ex) { + public void postIndex(Engine.Index index, Exception ex) { postIndexException.incrementAndGet(); } @@ -732,7 +747,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { } @Override - public void postDelete(Engine.Delete delete, Throwable ex) { + public void postDelete(Engine.Delete delete, Exception ex) { postDeleteException.incrementAndGet(); } @@ -888,13 +903,18 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService(resolveIndex("test")); final IndexShard shard = test.getShardOrNull(0); + assertBusy(() -> assertThat(shard.state(), equalTo(IndexShardState.STARTED))); CountDownLatch latch = new CountDownLatch(1); Thread recoveryThread = new Thread(() -> { latch.countDown(); - shard.relocated("simulated recovery"); + try { + shard.relocated("simulated recovery"); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } }); - try (Releasable ignored = shard.acquirePrimaryOperationLock()) { + try (Releasable ignored = acquirePrimaryOperationLockBlockingly(shard)) { // start finalization of recovery recoveryThread.start(); latch.await(); @@ -904,12 +924,50 @@ public class IndexShardTests extends ESSingleNodeTestCase { // recovery can be now finalized recoveryThread.join(); assertThat(shard.state(), equalTo(IndexShardState.RELOCATED)); - try (Releasable ignored = shard.acquirePrimaryOperationLock()) { + try (Releasable ignored = acquirePrimaryOperationLockBlockingly(shard)) { // lock can again be acquired assertThat(shard.state(), equalTo(IndexShardState.RELOCATED)); } } + public void testDelayedOperationsBeforeAndAfterRelocated() throws Exception { + assertAcked(client().admin().indices().prepareCreate("test").setSettings( + Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0) + ).get()); + ensureGreen(); + IndicesService indicesService = getInstanceFromNode(IndicesService.class); + IndexService test = indicesService.indexService(resolveIndex("test")); + final IndexShard shard = test.getShardOrNull(0); + assertBusy(() -> assertThat(shard.state(), equalTo(IndexShardState.STARTED))); + Thread recoveryThread = new Thread(() -> { + try { + shard.relocated("simulated recovery"); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); + + recoveryThread.start(); + List> onLockAcquiredActions = new ArrayList<>(); + for (int i = 0; i < 10; i++) { + PlainActionFuture onLockAcquired = new PlainActionFuture() { + @Override + public void onResponse(Releasable releasable) { + releasable.close(); + super.onResponse(releasable); + } + }; + 
shard.acquirePrimaryOperationLock(onLockAcquired, ThreadPool.Names.INDEX); + onLockAcquiredActions.add(onLockAcquired); + } + + for (PlainActionFuture onLockAcquired : onLockAcquiredActions) { + assertNotNull(onLockAcquired.get(30, TimeUnit.SECONDS)); + } + + recoveryThread.join(); + } + public void testStressRelocated() throws Exception { assertAcked(client().admin().indices().prepareCreate("test").setSettings( Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0) @@ -926,10 +984,10 @@ public class IndexShardTests extends ESSingleNodeTestCase { indexThreads[i] = new Thread() { @Override public void run() { - try (Releasable operationLock = shard.acquirePrimaryOperationLock()) { + try (Releasable operationLock = acquirePrimaryOperationLockBlockingly(shard)) { allPrimaryOperationLocksAcquired.countDown(); barrier.await(); - } catch (InterruptedException | BrokenBarrierException e) { + } catch (InterruptedException | BrokenBarrierException | ExecutionException e) { throw new RuntimeException(e); } } @@ -938,7 +996,11 @@ public class IndexShardTests extends ESSingleNodeTestCase { } AtomicBoolean relocated = new AtomicBoolean(); final Thread recoveryThread = new Thread(() -> { - shard.relocated("simulated recovery"); + try { + shard.relocated("simulated recovery"); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } relocated.set(true); }); // ensure we wait for all primary operation locks to be acquired @@ -980,7 +1042,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { routing = ShardRoutingHelper.reinit(routing); IndexShard newShard = test.createShard(routing); newShard.updateRoutingEntry(routing); - DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); newShard.markAsRecovering("store", new RecoveryState(newShard.shardId(), routing.primary(), RecoveryState.Type.STORE, localNode, localNode)); assertTrue(newShard.recoverFromStore()); assertEquals(translogOps, newShard.recoveryState().getTranslog().recoveredOperations()); @@ -1007,7 +1069,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { routing = ShardRoutingHelper.reinit(routing, UnassignedInfo.Reason.INDEX_CREATED); IndexShard newShard = test.createShard(routing); newShard.updateRoutingEntry(routing); - DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); newShard.markAsRecovering("store", new RecoveryState(newShard.shardId(), routing.primary(), RecoveryState.Type.STORE, localNode, localNode)); assertTrue(newShard.recoverFromStore()); @@ -1024,7 +1086,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { createIndex("test"); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); IndexService test = indicesService.indexService(resolveIndex("test")); final IndexShard shard = test.getShardOrNull(0); @@ -1120,7 +1182,7 @@ public class IndexShardTests extends 
ESSingleNodeTestCase { Store targetStore = test_target_shard.store(); test_target_shard.updateRoutingEntry(routing); - DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); test_target_shard.markAsRecovering("store", new RecoveryState(routing.shardId(), routing.primary(), RecoveryState.Type.SNAPSHOT, routing.restoreSource(), localNode)); assertTrue(test_target_shard.restoreFromRepository(new IndexShardRepository() { @Override @@ -1327,7 +1389,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {}; shard.close("simon says", false); AtomicReference shardRef = new AtomicReference<>(); - List failures = new ArrayList<>(); + List failures = new ArrayList<>(); IndexingOperationListener listener = new IndexingOperationListener() { @Override @@ -1337,9 +1399,9 @@ public class IndexShardTests extends ESSingleNodeTestCase { // this is all IMC needs to do - check current memory and refresh assertTrue(shardRef.get().getIndexBufferRAMBytesUsed() > 0); shardRef.get().refresh("test"); - } catch (Throwable t) { - failures.add(t); - throw t; + } catch (Exception e) { + failures.add(e); + throw e; } } @@ -1351,9 +1413,9 @@ public class IndexShardTests extends ESSingleNodeTestCase { // this is all IMC needs to do - check current memory and refresh assertTrue(shardRef.get().getIndexBufferRAMBytesUsed() > 0); shardRef.get().refresh("test"); - } catch (Throwable t) { - failures.add(t); - throw t; + } catch (Exception e) { + failures.add(e); + throw e; } } }; @@ -1404,7 +1466,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { } public static final IndexShard recoverShard(IndexShard newShard) throws IOException { - DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); newShard.markAsRecovering("store", new RecoveryState(newShard.shardId(), newShard.routingEntry().primary(), RecoveryState.Type.STORE, localNode, localNode)); assertTrue(newShard.recoverFromStore()); newShard.updateRoutingEntry(newShard.routingEntry().moveToStarted()); @@ -1417,8 +1479,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { shard.store(), indexService.cache(), indexService.mapperService(), indexService.similarityService(), indexService.fieldData(), shard.getEngineFactory(), indexService.getIndexEventListener(), wrapper, indexService.getThreadPool(), indexService.getBigArrays(), null, shard.getGlobalCheckpointSyncer(), - Collections.emptyList(), Arrays.asList(listeners) - ); + Collections.emptyList(), Arrays.asList(listeners)); return newShard; } @@ -1446,10 +1507,10 @@ public class IndexShardTests extends ESSingleNodeTestCase { ShardRouting routing = getInitializingShardRouting(shard.routingEntry()); test.removeShard(0, "b/c britta says so"); IndexShard newShard = test.createShard(routing); - DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); newShard.markAsRecovering("for testing", new RecoveryState(newShard.shardId(), routing.primary(), 
RecoveryState.Type.REPLICA, localNode, localNode)); List operations = new ArrayList<>(); - operations.add(new Translog.Index("testtype", "1", jsonBuilder().startObject().field("foo", "bar").endObject().bytes().toBytes())); + operations.add(new Translog.Index("testtype", "1", BytesReference.toBytes(jsonBuilder().startObject().field("foo", "bar").endObject().bytes()))); newShard.prepareForIndexRecovery(); newShard.recoveryState().getTranslog().totalOperations(operations.size()); newShard.skipTranslogRecovery(); @@ -1474,7 +1535,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { test.removeShard(0, "b/c britta says so"); IndexShard newShard = test.createShard(routing); newShard.shardRouting = routing; - DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); newShard.markAsRecovering("for testing", new RecoveryState(newShard.shardId(), routing.primary(), RecoveryState.Type.REPLICA, localNode, localNode)); // Shard is still inactive since we haven't started recovering yet assertFalse(newShard.isActive()); @@ -1502,12 +1563,12 @@ public class IndexShardTests extends ESSingleNodeTestCase { ShardRouting routing = getInitializingShardRouting(shard.routingEntry()); test.removeShard(0, "b/c britta says so"); IndexShard newShard = test.createShard(routing); - DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); newShard.markAsRecovering("for testing", new RecoveryState(newShard.shardId(), routing.primary(), RecoveryState.Type.REPLICA, localNode, localNode)); // Shard is still inactive since we haven't started recovering yet assertFalse(newShard.isActive()); List operations = new ArrayList<>(); - operations.add(new Translog.Index("testtype", "1", jsonBuilder().startObject().field("foo", "bar").endObject().bytes().toBytes())); + operations.add(new Translog.Index("testtype", "1", BytesReference.toBytes(jsonBuilder().startObject().field("foo", "bar").endObject().bytes()))); newShard.prepareForIndexRecovery(); newShard.skipTranslogRecovery(); // Shard is still inactive since we haven't started recovering yet @@ -1537,7 +1598,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndexShard shard = test.getShardOrNull(0); ShardRouting routing = ShardRoutingHelper.initWithSameId(shard.routingEntry()); test.removeShard(0, "b/c simon says so"); - DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); { final IndexShard newShard = test.createShard(routing); newShard.updateRoutingEntry(routing); diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexingOperationListenerTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexingOperationListenerTests.java index 8d86e64a391..d1cf8b32f58 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexingOperationListenerTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexingOperationListenerTests.java @@ -51,7 +51,7 @@ public class IndexingOperationListenerTests extends ESTestCase{ } @Override - 
public void postIndex(Engine.Index index, Throwable ex) { + public void postIndex(Engine.Index index, Exception ex) { postIndexException.incrementAndGet(); } @@ -67,7 +67,7 @@ public class IndexingOperationListenerTests extends ESTestCase{ } @Override - public void postDelete(Engine.Delete delete, Throwable ex) { + public void postDelete(Engine.Delete delete, Exception ex) { postDeleteException.incrementAndGet(); } }; @@ -83,7 +83,7 @@ public class IndexingOperationListenerTests extends ESTestCase{ throw new RuntimeException(); } @Override - public void postIndex(Engine.Index index, Throwable ex) { + public void postIndex(Engine.Index index, Exception ex) { throw new RuntimeException(); } @Override @@ -96,7 +96,7 @@ public class IndexingOperationListenerTests extends ESTestCase{ throw new RuntimeException(); } @Override - public void postDelete(Engine.Delete delete, Throwable ex) { + public void postDelete(Engine.Delete delete, Exception ex) { throw new RuntimeException(); } }; diff --git a/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index 375b0e955d3..039e7af23a4 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -115,7 +115,7 @@ public class RefreshListenersTests extends ESTestCase { BigArrays.NON_RECYCLING_INSTANCE); Engine.EventListener eventListener = new Engine.EventListener() { @Override - public void onFailedEngine(String reason, @Nullable Throwable t) { + public void onFailedEngine(String reason, @Nullable Exception e) { // we don't need to notify anybody in this test } }; @@ -251,7 +251,7 @@ public class RefreshListenersTests extends ESTestCase { getResult.docIdAndVersion().context.reader().document(getResult.docIdAndVersion().docId, visitor); assertEquals(Arrays.asList(testFieldValue), visitor.fields().get("test")); } - } catch (Throwable t) { + } catch (Exception t) { throw new RuntimeException("failure on the [" + iteration + "] iteration of thread [" + threadId + "]", t); } } @@ -291,7 +291,7 @@ public class RefreshListenersTests extends ESTestCase { * When the listener is called this captures it's only argument. 
*/ AtomicReference forcedRefresh = new AtomicReference<>(); - private volatile Throwable error; + private volatile Exception error; @Override public void accept(Boolean forcedRefresh) { @@ -299,7 +299,7 @@ public class RefreshListenersTests extends ESTestCase { assertNotNull(forcedRefresh); Boolean oldValue = this.forcedRefresh.getAndSet(forcedRefresh); assertNull("Listener called twice", oldValue); - } catch (Throwable e) { + } catch (Exception e) { error = e; } } diff --git a/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java b/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java index b2bd7e1f9ff..749b1621e4d 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java @@ -45,7 +45,7 @@ public class ShardPathTests extends ESTestCase { ShardId shardId = new ShardId("foo", "0xDEADBEEF", 0); Path[] paths = env.availableShardPaths(shardId); Path path = randomFrom(paths); - ShardStateMetaData.FORMAT.write(new ShardStateMetaData(2, true, "0xDEADBEEF", AllocationId.newInitializing()), 2, path); + ShardStateMetaData.FORMAT.write(new ShardStateMetaData(2, true, "0xDEADBEEF", AllocationId.newInitializing()), path); ShardPath shardPath = ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings)); assertEquals(path, shardPath.getDataPath()); assertEquals("0xDEADBEEF", shardPath.getShardId().getIndex().getUUID()); @@ -65,7 +65,7 @@ public class ShardPathTests extends ESTestCase { Path[] paths = env.availableShardPaths(shardId); assumeTrue("This test tests multi data.path but we only got one", paths.length > 1); int id = randomIntBetween(1, 10); - ShardStateMetaData.FORMAT.write(new ShardStateMetaData(id, true, indexUUID, AllocationId.newInitializing()), id, paths); + ShardStateMetaData.FORMAT.write(new ShardStateMetaData(id, true, indexUUID, AllocationId.newInitializing()), paths); ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings)); fail("Expected IllegalStateException"); } catch (IllegalStateException e) { @@ -82,7 +82,7 @@ public class ShardPathTests extends ESTestCase { Path[] paths = env.availableShardPaths(shardId); Path path = randomFrom(paths); int id = randomIntBetween(1, 10); - ShardStateMetaData.FORMAT.write(new ShardStateMetaData(id, true, "0xDEADBEEF", AllocationId.newInitializing()), id, path); + ShardStateMetaData.FORMAT.write(new ShardStateMetaData(id, true, "0xDEADBEEF", AllocationId.newInitializing()), path); ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings)); fail("Expected IllegalStateException"); } catch (IllegalStateException e) { @@ -124,7 +124,7 @@ public class ShardPathTests extends ESTestCase { final boolean includeNodeId = randomBoolean(); indexSettings = indexSettingsBuilder.put(IndexMetaData.SETTING_DATA_PATH, "custom").build(); nodeSettings = Settings.builder().put(Environment.PATH_SHARED_DATA_SETTING.getKey(), path.toAbsolutePath().toAbsolutePath()) - .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), includeNodeId).build(); + .put(NodeEnvironment.ADD_NODE_LOCK_ID_TO_CUSTOM_PATH.getKey(), includeNodeId).build(); if (includeNodeId) { customPath = path.resolve("custom").resolve("0"); } else { @@ -139,7 +139,7 @@ public class ShardPathTests extends ESTestCase { ShardId shardId = new ShardId("foo", indexUUID, 0); Path[] paths = env.availableShardPaths(shardId); 
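The catch-block and listener rewrites running through the hunks above are one mechanical pattern: anywhere a test caught or accepted Throwable, it now deals in Exception, so genuine Errors (OutOfMemoryError, AssertionError) are no longer swallowed into a failure list but escape and kill the test. A minimal self-contained sketch of the narrowed shape, assuming only the JDK — the class and method names here are illustrative, not taken from the diff:

    import java.util.List;
    import java.util.concurrent.CopyOnWriteArrayList;

    public class ExceptionNarrowingSketch {
        // Failures gathered from worker threads. The element type is Exception,
        // mirroring the List<Throwable> -> List<Exception> changes above, so an
        // Error can no longer be recorded here and silently "handled".
        private static final List<Exception> failures = new CopyOnWriteArrayList<>();

        static void runStep(Runnable step) {
            try {
                step.run();
            } catch (Exception e) { // was: catch (Throwable t)
                failures.add(e);
                throw e;            // precise rethrow; the caller still fails fast
            }
        }

        public static void main(String[] args) {
            runStep(() -> System.out.println("step ran"));
            System.out.println("captured failures: " + failures.size());
        }
    }

The same narrowing shows up below in listener signatures (postIndex, postDelete, onFailedEngine, onFailure), where it is an API change rather than a local catch rewrite.
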
Path path = randomFrom(paths); - ShardStateMetaData.FORMAT.write(new ShardStateMetaData(2, true, indexUUID, AllocationId.newInitializing()), 2, path); + ShardStateMetaData.FORMAT.write(new ShardStateMetaData(2, true, indexUUID, AllocationId.newInitializing()), path); ShardPath shardPath = ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), indexSettings)); boolean found = false; for (Path p : env.nodeDataPaths()) { diff --git a/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java b/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java index 70eacaafedb..abaebb88c5e 100644 --- a/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java +++ b/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.snapshots.blobstore; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Version; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -52,7 +53,7 @@ public class FileInfoTests extends ESTestCase { BlobStoreIndexShardSnapshot.FileInfo info = new BlobStoreIndexShardSnapshot.FileInfo("_foobar", meta, size); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).prettyPrint(); BlobStoreIndexShardSnapshot.FileInfo.toXContent(info, builder, ToXContent.EMPTY_PARAMS); - byte[] xcontent = shuffleXContent(builder).bytes().toBytes(); + byte[] xcontent = BytesReference.toBytes(shuffleXContent(builder).bytes()); final BlobStoreIndexShardSnapshot.FileInfo parsedInfo; try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(xcontent)) { @@ -111,7 +112,7 @@ public class FileInfoTests extends ESTestCase { builder.field(FileInfo.WRITTEN_BY, Version.LATEST.toString()); builder.field(FileInfo.CHECKSUM, "666"); builder.endObject(); - byte[] xContent = builder.bytes().toBytes(); + byte[] xContent = BytesReference.toBytes(builder.bytes()); if (failure == null) { // No failures should read as usual diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java index 6508336d9f8..a8f8a9f802d 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -22,6 +22,7 @@ import com.carrotsearch.hppc.cursors.IntObjectCursor; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.lucene.index.CheckIndex; import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; @@ -192,7 +193,7 @@ public class CorruptedFileIT extends ESIntegTestCase { * run the checkindex. if the corruption is still there we will catch it. 
*/ final CountDownLatch latch = new CountDownLatch(numShards * 3); // primary + 2 replicas - final CopyOnWriteArrayList exception = new CopyOnWriteArrayList<>(); + final CopyOnWriteArrayList exception = new CopyOnWriteArrayList<>(); final IndexEventListener listener = new IndexEventListener() { @Override public void afterIndexShardClosed(ShardId sid, @Nullable IndexShard indexShard, Settings indexSettings) { @@ -210,12 +211,12 @@ public class CorruptedFileIT extends ESIntegTestCase { out.flush(); CheckIndex.Status status = checkIndex.checkIndex(); if (!status.clean) { - logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8)); + logger.warn("check index [failure]\n{}", os.bytes().utf8ToString()); throw new IOException("index check failure"); } } - } catch (Throwable t) { - exception.add(t); + } catch (Exception e) { + exception.add(e); } finally { store.decRef(); latch.countDown(); @@ -346,7 +347,7 @@ public class CorruptedFileIT extends ESIntegTestCase { public void sendRequest(DiscoveryNode node, long requestId, String action, TransportRequest request, TransportRequestOptions options) throws IOException, TransportException { if (corrupt.get() && action.equals(RecoveryTargetService.Actions.FILE_CHUNK)) { RecoveryFileChunkRequest req = (RecoveryFileChunkRequest) request; - byte[] array = req.content().array(); + byte[] array = BytesRef.deepCopyOf(req.content().toBytesRef()).bytes; int i = randomIntBetween(0, req.content().length() - 1); array[i] = (byte) ~array[i]; // flip one byte in the content hasCorrupted.countDown(); @@ -419,10 +420,12 @@ public class CorruptedFileIT extends ESIntegTestCase { if (action.equals(RecoveryTargetService.Actions.FILE_CHUNK)) { RecoveryFileChunkRequest req = (RecoveryFileChunkRequest) request; if (truncate && req.length() > 1) { - BytesArray array = new BytesArray(req.content().array(), req.content().arrayOffset(), (int) req.length() - 1); + BytesRef bytesRef = req.content().toBytesRef(); + BytesArray array = new BytesArray(bytesRef.bytes, bytesRef.offset, (int) req.length() - 1); request = new RecoveryFileChunkRequest(req.recoveryId(), req.shardId(), req.metadata(), req.position(), array, req.lastChunk(), req.totalTranslogOps(), req.sourceThrottleTimeInNanos()); } else { - byte[] array = req.content().array(); + assert req.content().toBytesRef().bytes == req.content().toBytesRef().bytes : "no internal reference!!"; + final byte[] array = req.content().toBytesRef().bytes; int i = randomIntBetween(0, req.content().length() - 1); array[i] = (byte) ~array[i]; // flip one byte in the content } @@ -643,12 +646,12 @@ public class CorruptedFileIT extends ESIntegTestCase { return shardRouting; } - private static final boolean isPerCommitFile(String fileName) { + private static boolean isPerCommitFile(String fileName) { // .liv and segments_N are per commit files and might change after corruption return fileName.startsWith("segments") || fileName.endsWith(".liv"); } - private static final boolean isPerSegmentFile(String fileName) { + private static boolean isPerSegmentFile(String fileName) { return isPerCommitFile(fileName) == false; } diff --git a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java index 4f3e68a9115..e40f1c7f06f 100644 --- a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -933,7 +933,7 @@ public class StoreTests extends ESTestCase { public 
void testStreamStoreFilesMetaData() throws Exception { Store.MetadataSnapshot metadataSnapshot = createMetaDataSnapshot(); - TransportNodesListShardStoreMetaData.StoreFilesMetaData outStoreFileMetaData = new TransportNodesListShardStoreMetaData.StoreFilesMetaData(randomBoolean(), new ShardId("test", "_na_", 0),metadataSnapshot); + TransportNodesListShardStoreMetaData.StoreFilesMetaData outStoreFileMetaData = new TransportNodesListShardStoreMetaData.StoreFilesMetaData(new ShardId("test", "_na_", 0),metadataSnapshot); ByteArrayOutputStream outBuffer = new ByteArrayOutputStream(); OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer); org.elasticsearch.Version targetNodeVersion = randomVersion(random()); @@ -1086,7 +1086,7 @@ public class StoreTests extends ESTestCase { String uuid = Store.CORRUPTED + UUIDs.randomBase64UUID(); try (IndexOutput output = dir.createOutput(uuid, IOContext.DEFAULT)) { CodecUtil.writeHeader(output, Store.CODEC, Store.VERSION_STACK_TRACE); - output.writeString(ExceptionsHelper.detailedMessage(exception, true, 0)); + output.writeString(ExceptionsHelper.detailedMessage(exception)); output.writeString(ExceptionsHelper.stackTrace(exception)); CodecUtil.writeFooter(output); } @@ -1102,7 +1102,7 @@ public class StoreTests extends ESTestCase { try (IndexOutput output = dir.createOutput(uuid, IOContext.DEFAULT)) { CodecUtil.writeHeader(output, Store.CODEC, Store.VERSION_START); - output.writeString(ExceptionsHelper.detailedMessage(exception, true, 0)); + output.writeString(ExceptionsHelper.detailedMessage(exception)); CodecUtil.writeFooter(output); } try { diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index c08fa2f6d7b..e6c0620abf7 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.translog; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.index.Term; import org.apache.lucene.mockfile.FilterFileChannel; @@ -60,9 +59,7 @@ import java.nio.charset.Charset; import java.nio.file.FileAlreadyExistsException; import java.nio.file.Files; import java.nio.file.InvalidPathException; -import java.nio.file.OpenOption; import java.nio.file.Path; -import java.nio.file.Paths; import java.nio.file.StandardOpenOption; import java.util.ArrayList; import java.util.Collection; @@ -217,24 +214,24 @@ public class TranslogTests extends ESTestCase { Translog.Location loc2 = translog.add(new Translog.Index("test", "2", new byte[]{2})); assertThat(loc2, greaterThan(loc1)); assertThat(translog.getLastWriteLocation(), greaterThan(loc2)); - assertThat(translog.read(loc1).getSource().source.toBytesArray(), equalTo(new BytesArray(new byte[]{1}))); - assertThat(translog.read(loc2).getSource().source.toBytesArray(), equalTo(new BytesArray(new byte[]{2}))); + assertThat(translog.read(loc1).getSource().source, equalTo(new BytesArray(new byte[]{1}))); + assertThat(translog.read(loc2).getSource().source, equalTo(new BytesArray(new byte[]{2}))); Translog.Location lastLocBeforeSync = translog.getLastWriteLocation(); translog.sync(); assertEquals(lastLocBeforeSync, translog.getLastWriteLocation()); - assertThat(translog.read(loc1).getSource().source.toBytesArray(), equalTo(new BytesArray(new byte[]{1}))); - 
assertThat(translog.read(loc2).getSource().source.toBytesArray(), equalTo(new BytesArray(new byte[]{2}))); + assertThat(translog.read(loc1).getSource().source, equalTo(new BytesArray(new byte[]{1}))); + assertThat(translog.read(loc2).getSource().source, equalTo(new BytesArray(new byte[]{2}))); Translog.Location loc3 = translog.add(new Translog.Index("test", "2", new byte[]{3})); assertThat(loc3, greaterThan(loc2)); assertThat(translog.getLastWriteLocation(), greaterThan(loc3)); - assertThat(translog.read(loc3).getSource().source.toBytesArray(), equalTo(new BytesArray(new byte[]{3}))); + assertThat(translog.read(loc3).getSource().source, equalTo(new BytesArray(new byte[]{3}))); lastLocBeforeSync = translog.getLastWriteLocation(); translog.sync(); assertEquals(lastLocBeforeSync, translog.getLastWriteLocation()); - assertThat(translog.read(loc3).getSource().source.toBytesArray(), equalTo(new BytesArray(new byte[]{3}))); + assertThat(translog.read(loc3).getSource().source, equalTo(new BytesArray(new byte[]{3}))); translog.prepareCommit(); /* * The commit adds to the lastWriteLocation even though is isn't really a write. This is just an implementation artifact but it can @@ -242,7 +239,7 @@ public class TranslogTests extends ESTestCase { * and less than the location of the next write operation. */ assertThat(translog.getLastWriteLocation(), greaterThan(lastLocBeforeSync)); - assertThat(translog.read(loc3).getSource().source.toBytesArray(), equalTo(new BytesArray(new byte[]{3}))); + assertThat(translog.read(loc3).getSource().source, equalTo(new BytesArray(new byte[]{3}))); translog.commit(); assertNull(translog.read(loc1)); assertNull(translog.read(loc2)); @@ -274,7 +271,7 @@ public class TranslogTests extends ESTestCase { Translog.Index index = (Translog.Index) snapshot.next(); assertThat(index != null, equalTo(true)); - assertThat(index.source().toBytes(), equalTo(new byte[]{1})); + assertThat(BytesReference.toBytes(index.source()), equalTo(new byte[]{1})); Translog.Delete delete = (Translog.Delete) snapshot.next(); assertThat(delete != null, equalTo(true)); @@ -303,7 +300,7 @@ public class TranslogTests extends ESTestCase { if (randomBoolean()) { BytesStreamOutput out = new BytesStreamOutput(); stats.writeTo(out); - StreamInput in = StreamInput.wrap(out.bytes()); + StreamInput in = out.bytes().streamInput(); stats = new TranslogStats(); stats.readFrom(in); } @@ -350,7 +347,7 @@ public class TranslogTests extends ESTestCase { BytesStreamOutput out = new BytesStreamOutput(); total.writeTo(out); TranslogStats copy = new TranslogStats(); - copy.readFrom(StreamInput.wrap(out.bytes())); + copy.readFrom(out.bytes().streamInput()); assertEquals(6, copy.estimatedNumberOfOperations()); assertEquals(437, copy.getTranslogSizeInBytes()); @@ -457,7 +454,7 @@ public class TranslogTests extends ESTestCase { final BlockingQueue writtenOperations = new ArrayBlockingQueue<>(threadCount * opsPerThread); Thread[] threads = new Thread[threadCount]; - final Throwable[] threadExceptions = new Throwable[threadCount]; + final Exception[] threadExceptions = new Exception[threadCount]; final CountDownLatch downLatch = new CountDownLatch(1); for (int i = 0; i < threadCount; i++) { final int threadId = i; @@ -625,7 +622,7 @@ public class TranslogTests extends ESTestCase { final AtomicBoolean run = new AtomicBoolean(true); // any errors on threads - final List errors = new CopyOnWriteArrayList<>(); + final List errors = new CopyOnWriteArrayList<>(); logger.debug("using [{}] readers. [{}] writers. 
flushing every ~[{}] ops.", readers.length, writers.length, flushEveryOps); for (int i = 0; i < writers.length; i++) { final String threadName = "writer_" + i; @@ -664,9 +661,9 @@ public class TranslogTests extends ESTestCase { } @Override - public void onFailure(Throwable t) { - logger.error("--> writer [{}] had an error", t, threadName); - errors.add(t); + public void onFailure(Exception e) { + logger.error("--> writer [{}] had an error", e, threadName); + errors.add(e); } }, threadName); writers[i].start(); @@ -679,14 +676,14 @@ public class TranslogTests extends ESTestCase { Set writtenOpsAtView; @Override - public void onFailure(Throwable t) { - logger.error("--> reader [{}] had an error", t, threadId); - errors.add(t); + public void onFailure(Exception e) { + logger.error("--> reader [{}] had an error", e, threadId); + errors.add(e); try { closeView(); - } catch (IOException e) { - logger.error("unexpected error while closing view, after failure"); - t.addSuppressed(e); + } catch (IOException inner) { + inner.addSuppressed(e); + logger.error("unexpected error while closing view, after failure", inner); } } @@ -826,7 +823,7 @@ public class TranslogTests extends ESTestCase { assertEquals(max.generation, translog.currentFileGeneration()); final Translog.Operation read = translog.read(max); - assertEquals(read.getSource().source.toUtf8(), Integer.toString(count)); + assertEquals(read.getSource().source.utf8ToString(), Integer.toString(count)); } public static Translog.Location max(Translog.Location a, Translog.Location b) { @@ -858,7 +855,7 @@ public class TranslogTests extends ESTestCase { Translog.Location location = locations.get(op); if (op <= lastSynced) { final Translog.Operation read = reader.read(location); - assertEquals(Integer.toString(op), read.getSource().source.toUtf8()); + assertEquals(Integer.toString(op), read.getSource().source.utf8ToString()); } else { try { reader.read(location); @@ -994,7 +991,7 @@ public class TranslogTests extends ESTestCase { assertEquals("expected operation" + i + " to be in the previous translog but wasn't", translog.currentFileGeneration() - 1, locations.get(i).generation); Translog.Operation next = snapshot.next(); assertNotNull("operation " + i + " must be non-null", next); - assertEquals(i, Integer.parseInt(next.getSource().source.toUtf8())); + assertEquals(i, Integer.parseInt(next.getSource().source.utf8ToString())); } } } @@ -1029,7 +1026,7 @@ public class TranslogTests extends ESTestCase { for (int i = 0; i < upTo; i++) { Translog.Operation next = snapshot.next(); assertNotNull("operation " + i + " must be non-null synced: " + sync, next); - assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); + assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.utf8ToString())); } } if (randomBoolean()) { // recover twice @@ -1042,7 +1039,7 @@ public class TranslogTests extends ESTestCase { for (int i = 0; i < upTo; i++) { Translog.Operation next = snapshot.next(); assertNotNull("operation " + i + " must be non-null synced: " + sync, next); - assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); + assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.utf8ToString())); } } } @@ -1083,7 +1080,7 @@ public class TranslogTests extends ESTestCase { for (int i = 0; i < upTo; i++) { Translog.Operation next = snapshot.next(); assertNotNull("operation " + i + " must be 
non-null synced: " + sync, next); - assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); + assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.utf8ToString())); } } @@ -1098,7 +1095,7 @@ public class TranslogTests extends ESTestCase { for (int i = 0; i < upTo; i++) { Translog.Operation next = snapshot.next(); assertNotNull("operation " + i + " must be non-null synced: " + sync, next); - assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); + assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.utf8ToString())); } } } @@ -1142,7 +1139,7 @@ public class TranslogTests extends ESTestCase { for (int i = 0; i < upTo; i++) { Translog.Operation next = snapshot.next(); assertNotNull("operation " + i + " must be non-null synced: " + sync, next); - assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); + assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.utf8ToString())); } } } @@ -1156,7 +1153,7 @@ public class TranslogTests extends ESTestCase { ops.add(test); } Translog.writeOperations(out, ops); - final List readOperations = Translog.readOperations(StreamInput.wrap(out.bytes())); + final List readOperations = Translog.readOperations(out.bytes().streamInput()); assertEquals(ops.size(), readOperations.size()); assertEquals(ops, readOperations); } @@ -1217,7 +1214,7 @@ public class TranslogTests extends ESTestCase { for (int i = firstUncommitted; i < translogOperations; i++) { Translog.Operation next = snapshot.next(); assertNotNull("" + i, next); - assertEquals(Integer.parseInt(next.getSource().source.toUtf8()), i); + assertEquals(Integer.parseInt(next.getSource().source.utf8ToString()), i); } assertNull(snapshot.next()); } @@ -1241,7 +1238,7 @@ public class TranslogTests extends ESTestCase { final BlockingQueue writtenOperations = new ArrayBlockingQueue<>(threadCount * opsPerThread); Thread[] threads = new Thread[threadCount]; - final Throwable[] threadExceptions = new Throwable[threadCount]; + final Exception[] threadExceptions = new Exception[threadCount]; final CountDownLatch downLatch = new CountDownLatch(1); for (int i = 0; i < threadCount; i++) { final int threadId = i; @@ -1268,10 +1265,10 @@ public class TranslogTests extends ESTestCase { private final int opsPerThread; private final int threadId; private final Collection writtenOperations; - private final Throwable[] threadExceptions; + private final Exception[] threadExceptions; private final Translog translog; - public TranslogThread(Translog translog, CountDownLatch downLatch, int opsPerThread, int threadId, Collection writtenOperations, Throwable[] threadExceptions) { + public TranslogThread(Translog translog, CountDownLatch downLatch, int opsPerThread, int threadId, Collection writtenOperations, Exception[] threadExceptions) { this.translog = translog; this.downLatch = downLatch; this.opsPerThread = opsPerThread; @@ -1307,7 +1304,7 @@ public class TranslogTests extends ESTestCase { writtenOperations.add(new LocationOperation(op, loc)); afterAdd(); } - } catch (Throwable t) { + } catch (Exception t) { threadExceptions[threadId] = t; } } @@ -1393,7 +1390,7 @@ public class TranslogTests extends ESTestCase { assertEquals("expected operation" + i + " to be in the previous translog but wasn't", tlog.currentFileGeneration() - 1, locations.get(i).generation); 
Translog.Operation next = snapshot.next(); assertNotNull("operation " + i + " must be non-null", next); - assertEquals(i, Integer.parseInt(next.getSource().source.toUtf8())); + assertEquals(i, Integer.parseInt(next.getSource().source.utf8ToString())); } } } @@ -1449,7 +1446,7 @@ public class TranslogTests extends ESTestCase { final int threadCount = randomIntBetween(1, 5); Thread[] threads = new Thread[threadCount]; - final Throwable[] threadExceptions = new Throwable[threadCount]; + final Exception[] threadExceptions = new Exception[threadCount]; final CountDownLatch downLatch = new CountDownLatch(1); final CountDownLatch added = new CountDownLatch(randomIntBetween(10, 100)); List writtenOperations = Collections.synchronizedList(new ArrayList<>()); @@ -1717,7 +1714,7 @@ public class TranslogTests extends ESTestCase { for (int i = 0; i < 1; i++) { Translog.Operation next = snapshot.next(); assertNotNull("operation " + i + " must be non-null", next); - assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.toUtf8())); + assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.utf8ToString())); } tlog.add(new Translog.Index("test", "" + 1, Integer.toString(1).getBytes(Charset.forName("UTF-8")))); } @@ -1728,7 +1725,7 @@ public class TranslogTests extends ESTestCase { for (int i = 0; i < 2; i++) { Translog.Operation next = snapshot.next(); assertNotNull("operation " + i + " must be non-null", next); - assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.toUtf8())); + assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.utf8ToString())); } } } @@ -1772,7 +1769,7 @@ public class TranslogTests extends ESTestCase { for (int i = 0; i < 1; i++) { Translog.Operation next = snapshot.next(); assertNotNull("operation " + i + " must be non-null", next); - assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.toUtf8())); + assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.utf8ToString())); } tlog.add(new Translog.Index("test", "" + 1, Integer.toString(1).getBytes(Charset.forName("UTF-8")))); } @@ -1871,7 +1868,7 @@ public class TranslogTests extends ESTestCase { assertEquals(syncedDocs.size(), snapshot.totalOperations()); for (int i = 0; i < syncedDocs.size(); i++) { Translog.Operation next = snapshot.next(); - assertEquals(syncedDocs.get(i), next.getSource().source.toUtf8()); + assertEquals(syncedDocs.get(i), next.getSource().source.utf8ToString()); assertNotNull("operation " + i + " must be non-null", next); } } @@ -1903,4 +1900,23 @@ public class TranslogTests extends ESTestCase { Checkpoint read = Checkpoint.read(tempDir.resolve("foo.cpk")); assertEquals(read, checkpoint); } + + /** + * Tests that closing views after the translog is fine and we can reopen the translog + */ + public void testPendingDelete() throws IOException { + translog.add(new Translog.Index("test", "1", new byte[]{1})); + translog.prepareCommit(); + Translog.TranslogGeneration generation = translog.getGeneration(); + TranslogConfig config = translog.getConfig(); + translog.close(); + translog = new Translog(config, generation); + translog.add(new Translog.Index("test", "2", new byte[]{2})); + translog.prepareCommit(); + Translog.View view = translog.newView(); + translog.add(new Translog.Index("test", "3", new byte[]{3})); + translog.close(); + IOUtils.close(view); + translog = new Translog(config, generation); + } } diff --git 
a/core/src/test/java/org/elasticsearch/indexing/IndexActionIT.java b/core/src/test/java/org/elasticsearch/indexing/IndexActionIT.java index 23925f574ff..60e062c0d1c 100644 --- a/core/src/test/java/org/elasticsearch/indexing/IndexActionIT.java +++ b/core/src/test/java/org/elasticsearch/indexing/IndexActionIT.java @@ -50,7 +50,7 @@ public class IndexActionIT extends ESIntegTestCase { public void testAutoGenerateIdNoDuplicates() throws Exception { int numberOfIterations = scaledRandomIntBetween(10, 50); for (int i = 0; i < numberOfIterations; i++) { - Throwable firstError = null; + Exception firstError = null; createIndex("test"); int numOfDocs = randomIntBetween(10, 100); logger.info("indexing [{}] docs", numOfDocs); @@ -66,19 +66,19 @@ public class IndexActionIT extends ESIntegTestCase { try { logger.debug("running search with all types"); assertHitCount(client().prepareSearch("test").get(), numOfDocs); - } catch (Throwable t) { - logger.error("search for all docs types failed", t); + } catch (Exception e) { + logger.error("search for all docs types failed", e); if (firstError == null) { - firstError = t; + firstError = e; } } try { logger.debug("running search with a specific type"); assertHitCount(client().prepareSearch("test").setTypes("type").get(), numOfDocs); - } catch (Throwable t) { - logger.error("search for all docs of a specific type failed", t); + } catch (Exception e) { + logger.error("search for all docs of a specific type failed", e); if (firstError == null) { - firstError = t; + firstError = e; } } } diff --git a/core/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java b/core/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java index 1f1b758f349..7558fbd66fe 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java @@ -22,9 +22,8 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.cluster.routing.ShardRoutingHelper; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.IndexService; @@ -70,8 +69,7 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { super(Settings.builder() .put("indices.memory.interval", "200h") // disable it .put(settings) - .build(), - null, null, 100 * 1024 * 1024); // fix jvm mem size to 100mb + .build(), null, null); } public void deleteShard(IndexShard shard) { @@ -449,7 +447,7 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { try { assertEquals(0, imc.availableShards().size()); ShardRouting routing = newShard.routingEntry(); - DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); newShard.markAsRecovering("store", new RecoveryState(newShard.shardId(), routing.primary(), RecoveryState.Type.STORE, localNode, localNode)); assertEquals(1, imc.availableShards().size()); diff --git 
a/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerSingleNodeTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerSingleNodeTests.java index 28cc71fa422..b06103372f9 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerSingleNodeTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerSingleNodeTests.java @@ -25,7 +25,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingHelper; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.NodeServicesProvider; @@ -103,7 +103,7 @@ public class IndicesLifecycleListenerSingleNodeTests extends ESSingleNodeTestCas newRouting = ShardRoutingHelper.initialize(newRouting, nodeId); IndexShard shard = index.createShard(newRouting); shard.updateRoutingEntry(newRouting); - final DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, + final DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); shard.markAsRecovering("store", new RecoveryState(shard.shardId(), newRouting.primary(), RecoveryState.Type.SNAPSHOT, newRouting.restoreSource(), localNode)); shard.recoverFromStore(); diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java new file mode 100644 index 00000000000..8587b3993cd --- /dev/null +++ b/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java @@ -0,0 +1,163 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.indices; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MetadataFieldMapper; +import org.elasticsearch.index.mapper.core.TextFieldMapper; +import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper; +import org.elasticsearch.index.mapper.internal.IdFieldMapper; +import org.elasticsearch.indices.mapper.MapperRegistry; +import org.elasticsearch.plugins.MapperPlugin; +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; + +public class IndicesModuleTests extends ESTestCase { + + private static class FakeMapperParser implements Mapper.TypeParser { + @Override + public Mapper.Builder parse(String name, Map node, ParserContext parserContext) + throws MapperParsingException { + return null; + } + } + + private static class FakeMetadataMapperParser implements MetadataFieldMapper.TypeParser { + @Override + public MetadataFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) + throws MapperParsingException { + return null; + } + @Override + public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fieldType, String typeName) { + return null; + } + } + + List fakePlugins = Arrays.asList(new MapperPlugin() { + @Override + public Map getMappers() { + return Collections.singletonMap("fake-mapper", new FakeMapperParser()); + } + @Override + public Map getMetadataMappers() { + return Collections.singletonMap("fake-metadata-mapper", new FakeMetadataMapperParser()); + } + }); + + public void testBuiltinMappers() { + IndicesModule module = new IndicesModule(new NamedWriteableRegistry(), Collections.emptyList()); + assertFalse(module.getMapperRegistry().getMapperParsers().isEmpty()); + assertFalse(module.getMapperRegistry().getMetadataMapperParsers().isEmpty()); + } + + public void testBuiltinWithPlugins() { + IndicesModule module = new IndicesModule(new NamedWriteableRegistry(), fakePlugins); + MapperRegistry registry = module.getMapperRegistry(); + assertThat(registry.getMapperParsers().size(), Matchers.greaterThan(1)); + assertThat(registry.getMetadataMapperParsers().size(), Matchers.greaterThan(1)); + } + + public void testDuplicateBuiltinMapper() { + List plugins = Arrays.asList(new MapperPlugin() { + @Override + public Map getMappers() { + return Collections.singletonMap(TextFieldMapper.CONTENT_TYPE, new FakeMapperParser()); + } + }); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> new IndicesModule(new NamedWriteableRegistry(), plugins)); + assertThat(e.getMessage(), Matchers.containsString("already registered")); + } + + public void testDuplicateOtherPluginMapper() { + MapperPlugin plugin = new MapperPlugin() { + @Override + public Map getMappers() { + return Collections.singletonMap("foo", new FakeMapperParser()); + } + }; + List plugins = Arrays.asList(plugin, plugin); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> new IndicesModule(new NamedWriteableRegistry(), plugins)); + assertThat(e.getMessage(), Matchers.containsString("already registered")); + } + + public void testDuplicateBuiltinMetadataMapper() { + 
List plugins = Arrays.asList(new MapperPlugin() { + @Override + public Map getMetadataMappers() { + return Collections.singletonMap(IdFieldMapper.NAME, new FakeMetadataMapperParser()); + } + }); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> new IndicesModule(new NamedWriteableRegistry(), plugins)); + assertThat(e.getMessage(), Matchers.containsString("already registered")); + } + + public void testDuplicateOtherPluginMetadataMapper() { + MapperPlugin plugin = new MapperPlugin() { + @Override + public Map getMetadataMappers() { + return Collections.singletonMap("foo", new FakeMetadataMapperParser()); + } + }; + List plugins = Arrays.asList(plugin, plugin); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> new IndicesModule(new NamedWriteableRegistry(), plugins)); + assertThat(e.getMessage(), Matchers.containsString("already registered")); + } + + public void testDuplicateFieldNamesMapper() { + List plugins = Arrays.asList(new MapperPlugin() { + @Override + public Map getMetadataMappers() { + return Collections.singletonMap(FieldNamesFieldMapper.NAME, new FakeMetadataMapperParser()); + } + }); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> new IndicesModule(new NamedWriteableRegistry(), plugins)); + assertThat(e.getMessage(), Matchers.containsString("cannot contain metadata mapper [_field_names]")); + } + + public void testFieldNamesIsLast() { + IndicesModule module = new IndicesModule(new NamedWriteableRegistry(), Collections.emptyList()); + List fieldNames = module.getMapperRegistry().getMetadataMapperParsers().keySet() + .stream().collect(Collectors.toList()); + assertEquals(FieldNamesFieldMapper.NAME, fieldNames.get(fieldNames.size() - 1)); + } + + public void testFieldNamesIsLastWithPlugins() { + IndicesModule module = new IndicesModule(new NamedWriteableRegistry(), fakePlugins); + List fieldNames = module.getMapperRegistry().getMetadataMapperParsers().keySet() + .stream().collect(Collectors.toList()); + assertEquals(FieldNamesFieldMapper.NAME, fieldNames.get(fieldNames.size() - 1)); + } +} diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java index 1cca3bb7215..d43217d9785 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java @@ -62,7 +62,7 @@ public class IndicesRequestCacheTests extends ESTestCase { // initial cache TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0); BytesReference value = cache.getOrCompute(entity, reader, termQuery.buildAsBytes()); - assertEquals("foo", StreamInput.wrap(value).readString()); + assertEquals("foo", value.streamInput().readString()); assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -72,7 +72,7 @@ public class IndicesRequestCacheTests extends ESTestCase { // cache hit entity = new TestEntity(requestCacheStats, reader, indexShard, 0); value = cache.getOrCompute(entity, reader, termQuery.buildAsBytes()); - assertEquals("foo", StreamInput.wrap(value).readString()); + assertEquals("foo", value.streamInput().readString()); assertEquals(1, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getMissCount()); assertEquals(0, 
requestCacheStats.stats().getEvictions()); @@ -117,7 +117,7 @@ public class IndicesRequestCacheTests extends ESTestCase { // initial cache TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0); BytesReference value = cache.getOrCompute(entity, reader, termQuery.buildAsBytes()); - assertEquals("foo", StreamInput.wrap(value).readString()); + assertEquals("foo", value.streamInput().readString()); assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -130,7 +130,7 @@ public class IndicesRequestCacheTests extends ESTestCase { // cache the second TestEntity secondEntity = new TestEntity(requestCacheStats, secondReader, indexShard, 0); value = cache.getOrCompute(secondEntity, secondReader, termQuery.buildAsBytes()); - assertEquals("bar", StreamInput.wrap(value).readString()); + assertEquals("bar", value.streamInput().readString()); assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(2, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -141,7 +141,7 @@ public class IndicesRequestCacheTests extends ESTestCase { secondEntity = new TestEntity(requestCacheStats, secondReader, indexShard, 0); value = cache.getOrCompute(secondEntity, secondReader, termQuery.buildAsBytes()); - assertEquals("bar", StreamInput.wrap(value).readString()); + assertEquals("bar", value.streamInput().readString()); assertEquals(1, requestCacheStats.stats().getHitCount()); assertEquals(2, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -150,7 +150,7 @@ public class IndicesRequestCacheTests extends ESTestCase { entity = new TestEntity(requestCacheStats, reader, indexShard, 0); value = cache.getOrCompute(entity, reader, termQuery.buildAsBytes()); - assertEquals("foo", StreamInput.wrap(value).readString()); + assertEquals("foo", value.streamInput().readString()); assertEquals(2, requestCacheStats.stats().getHitCount()); assertEquals(2, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -209,9 +209,9 @@ public class IndicesRequestCacheTests extends ESTestCase { TestEntity secondEntity = new TestEntity(requestCacheStats, secondReader, indexShard, 0); BytesReference value1 = cache.getOrCompute(entity, reader, termQuery.buildAsBytes()); - assertEquals("foo", StreamInput.wrap(value1).readString()); + assertEquals("foo", value1.streamInput().readString()); BytesReference value2 = cache.getOrCompute(secondEntity, secondReader, termQuery.buildAsBytes()); - assertEquals("bar", StreamInput.wrap(value2).readString()); + assertEquals("bar", value2.streamInput().readString()); size = requestCacheStats.stats().getMemorySize(); IOUtils.close(reader, secondReader, writer, dir, cache); } @@ -240,12 +240,12 @@ public class IndicesRequestCacheTests extends ESTestCase { TestEntity thirddEntity = new TestEntity(requestCacheStats, thirdReader, indexShard, 0); BytesReference value1 = cache.getOrCompute(entity, reader, termQuery.buildAsBytes()); - assertEquals("foo", StreamInput.wrap(value1).readString()); + assertEquals("foo", value1.streamInput().readString()); BytesReference value2 = cache.getOrCompute(secondEntity, secondReader, termQuery.buildAsBytes()); - assertEquals("bar", StreamInput.wrap(value2).readString()); + assertEquals("bar", value2.streamInput().readString()); logger.info("Memory size: {}", 
requestCacheStats.stats().getMemorySize()); BytesReference value3 = cache.getOrCompute(thirddEntity, thirdReader, termQuery.buildAsBytes()); - assertEquals("baz", StreamInput.wrap(value3).readString()); + assertEquals("baz", value3.streamInput().readString()); assertEquals(2, cache.count()); assertEquals(1, requestCacheStats.stats().getEvictions()); IOUtils.close(reader, secondReader, thirdReader, writer, dir, cache); @@ -277,12 +277,12 @@ public class IndicesRequestCacheTests extends ESTestCase { TestEntity thirddEntity = new TestEntity(requestCacheStats, thirdReader, differentIdentity, 0); BytesReference value1 = cache.getOrCompute(entity, reader, termQuery.buildAsBytes()); - assertEquals("foo", StreamInput.wrap(value1).readString()); + assertEquals("foo", value1.streamInput().readString()); BytesReference value2 = cache.getOrCompute(secondEntity, secondReader, termQuery.buildAsBytes()); - assertEquals("bar", StreamInput.wrap(value2).readString()); + assertEquals("bar", value2.streamInput().readString()); logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize()); BytesReference value3 = cache.getOrCompute(thirddEntity, thirdReader, termQuery.buildAsBytes()); - assertEquals("baz", StreamInput.wrap(value3).readString()); + assertEquals("baz", value3.streamInput().readString()); assertEquals(3, cache.count()); final long hitCount = requestCacheStats.stats().getHitCount(); // clear all for the indexShard Idendity even though is't still open @@ -292,7 +292,7 @@ public class IndicesRequestCacheTests extends ESTestCase { // third has not been validated since it's a different identity value3 = cache.getOrCompute(thirddEntity, thirdReader, termQuery.buildAsBytes()); assertEquals(hitCount + 1, requestCacheStats.stats().getHitCount()); - assertEquals("baz", StreamInput.wrap(value3).readString()); + assertEquals("baz", value3.streamInput().readString()); IOUtils.close(reader, secondReader, thirdReader, writer, dir, cache); diff --git a/core/src/test/java/org/elasticsearch/indices/TermsLookupTests.java b/core/src/test/java/org/elasticsearch/indices/TermsLookupTests.java index 59d86ddce67..fea69133377 100644 --- a/core/src/test/java/org/elasticsearch/indices/TermsLookupTests.java +++ b/core/src/test/java/org/elasticsearch/indices/TermsLookupTests.java @@ -70,7 +70,7 @@ public class TermsLookupTests extends ESTestCase { TermsLookup termsLookup = randomTermsLookup(); try (BytesStreamOutput output = new BytesStreamOutput()) { termsLookup.writeTo(output); - try (StreamInput in = StreamInput.wrap(output.bytes())) { + try (StreamInput in = output.bytes().streamInput()) { TermsLookup deserializedLookup = new TermsLookup(in); assertEquals(deserializedLookup, termsLookup); assertEquals(deserializedLookup.hashCode(), termsLookup.hashCode()); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java similarity index 87% rename from core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java rename to core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java index e20e7d1a7c9..869ac622b39 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java +++ b/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java @@ -17,7 +17,7 @@ * under the License. 
 */
-package org.elasticsearch.index.analysis;
+package org.elasticsearch.indices.analysis;

 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
@@ -36,9 +36,20 @@ import org.elasticsearch.common.inject.ModuleTestCase;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.analysis.Analysis;
+import org.elasticsearch.index.analysis.AnalysisRegistry;
+import org.elasticsearch.index.analysis.AnalysisService;
+import org.elasticsearch.index.analysis.AnalysisTestsHelper;
+import org.elasticsearch.index.analysis.CustomAnalyzer;
+import org.elasticsearch.index.analysis.MappingCharFilterFactory;
+import org.elasticsearch.index.analysis.NamedAnalyzer;
+import org.elasticsearch.index.analysis.PatternReplaceCharFilterFactory;
+import org.elasticsearch.index.analysis.StandardTokenizerFactory;
+import org.elasticsearch.index.analysis.StopTokenFilterFactory;
+import org.elasticsearch.index.analysis.TokenFilterFactory;
 import org.elasticsearch.index.analysis.filter1.MyFilterTokenFilterFactory;
-import org.elasticsearch.indices.analysis.AnalysisModule;
-import org.elasticsearch.indices.analysis.HunspellService;
+import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
+import org.elasticsearch.plugins.AnalysisPlugin;
 import org.elasticsearch.test.IndexSettingsModule;
 import org.hamcrest.MatcherAssert;
@@ -49,9 +60,11 @@ import java.io.StringReader;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.util.Collections;
+import java.util.Map;
 import java.util.Set;

+import static java.util.Collections.singletonList;
+import static java.util.Collections.singletonMap;
 import static org.hamcrest.Matchers.either;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
@@ -72,8 +85,16 @@ public class AnalysisModuleTests extends ModuleTestCase {
     }

     public AnalysisRegistry getNewRegistry(Settings settings) {
-        return new AnalysisRegistry(null, new Environment(settings),
-            Collections.emptyMap(), Collections.singletonMap("myfilter", MyFilterTokenFilterFactory::new), Collections.emptyMap(), Collections.emptyMap());
+        try {
+            return new AnalysisModule(new Environment(settings), singletonList(new AnalysisPlugin() {
+                @Override
+                public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
+                    return singletonMap("myfilter", MyFilterTokenFilterFactory::new);
+                }
+            })).getAnalysisRegistry();
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
     }

     private Settings loadFromClasspath(String path) throws IOException {
@@ -125,7 +146,7 @@ public class AnalysisModuleTests extends ModuleTestCase {
         assertEquals(org.apache.lucene.util.Version.fromBits(3,6,0), analysisService2.analyzer("custom7").analyzer().getVersion());
     }

-    private void assertTokenFilter(String name, Class clazz) throws IOException {
+    private void assertTokenFilter(String name, Class<?> clazz) throws IOException {
         Settings settings = Settings.builder()
             .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
             .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
@@ -148,17 +169,9 @@ public class AnalysisModuleTests extends ModuleTestCase {
         StopTokenFilterFactory stop1 = (StopTokenFilterFactory) custom1.tokenFilters()[0];
         assertThat(stop1.stopWords().size(), equalTo(1));
-        //assertThat((Iterable) stop1.stopWords(), hasItem("test-stop".toCharArray()));

         analyzer = analysisService.analyzer("custom2").analyzer();
         assertThat(analyzer, instanceOf(CustomAnalyzer.class));
-        CustomAnalyzer custom2 = (CustomAnalyzer) analyzer;
-
-//        HtmlStripCharFilterFactory html = (HtmlStripCharFilterFactory) custom2.charFilters()[0];
-//        assertThat(html.readAheadLimit(), equalTo(HTMLStripCharFilter.DEFAULT_READ_AHEAD));
-//
-//        html = (HtmlStripCharFilterFactory) custom2.charFilters()[1];
-//        assertThat(html.readAheadLimit(), equalTo(1024));

         // verify position increment gap
         analyzer = analysisService.analyzer("custom6").analyzer();
@@ -248,7 +261,8 @@
             getAnalysisService(settings);
             fail("This should fail with IllegalArgumentException because the analyzers name starts with _");
         } catch (IllegalArgumentException e) {
-            assertThat(e.getMessage(), either(equalTo("analyzer name must not start with '_'. got \"_invalid_name\"")).or(equalTo("analyzer name must not start with '_'. got \"_invalidName\"")));
+            assertThat(e.getMessage(), either(equalTo("analyzer name must not start with '_'. got \"_invalid_name\""))
+                .or(equalTo("analyzer name must not start with '_'. got \"_invalidName\"")));
         }
     }
@@ -289,13 +303,18 @@
             .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
             .build();
         Environment environment = new Environment(settings);
-        AnalysisModule module = new AnalysisModule(environment);
         InputStream aff = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.aff");
         InputStream dic = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.dic");
+        Dictionary dictionary;
         try (Directory tmp = new SimpleFSDirectory(environment.tmpFile())) {
-            Dictionary dictionary = new Dictionary(tmp, "hunspell", aff, dic);
-            module.registerHunspellDictionary("foo", dictionary);
-            assertInstanceBinding(module, HunspellService.class, (x) -> x.getDictionary("foo") == dictionary);
+            dictionary = new Dictionary(tmp, "hunspell", aff, dic);
         }
+        AnalysisModule module = new AnalysisModule(environment, singletonList(new AnalysisPlugin() {
+            @Override
+            public Map<String, Dictionary> getHunspellDictionaries() {
+                return singletonMap("foo", dictionary);
+            }
+        }));
+        assertSame(dictionary, module.getHunspellService().getDictionary("foo"));
     }
 }
diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalysisPlugin.java b/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalysisPlugin.java
index 86c54f2ece9..bbfeacfc590 100644
--- a/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalysisPlugin.java
+++ b/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalysisPlugin.java
@@ -19,15 +19,38 @@

 package org.elasticsearch.indices.analysis;

+import org.apache.lucene.analysis.Analyzer;
+import org.elasticsearch.index.analysis.AnalyzerProvider;
+import org.elasticsearch.index.analysis.CharFilterFactory;
+import org.elasticsearch.index.analysis.TokenFilterFactory;
+import org.elasticsearch.index.analysis.TokenizerFactory;
+import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
+import org.elasticsearch.plugins.AnalysisPlugin;
 import org.elasticsearch.plugins.Plugin;

-public class DummyAnalysisPlugin extends Plugin {
+import java.util.Map;

-    public void onModule(AnalysisModule module) {
-        module.registerAnalyzer("dummy", (a, b, c, d) -> new DummyAnalyzerProvider());
-        module.registerTokenFilter("dummy_token_filter", (a, b, c, d) -> new DummyTokenFilterFactory());
-        module.registerTokenizer("dummy_tokenizer", (a, b, c, d) -> new DummyTokenizerFactory());
-        module.registerCharFilter("dummy_char_filter", (a, b, c, d) -> new DummyCharFilterFactory());
+import static java.util.Collections.singletonMap;
+
+public class DummyAnalysisPlugin extends Plugin implements AnalysisPlugin {
+    @Override
+    public Map<String, AnalysisProvider<CharFilterFactory>> getCharFilters() {
+        return singletonMap("dummy_char_filter", (a, b, c, d) -> new DummyCharFilterFactory());
+    }
+
+    @Override
+    public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
+        return singletonMap("dummy_token_filter", (a, b, c, d) -> new DummyTokenFilterFactory());
+    }
+
+    @Override
+    public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
+        return singletonMap("dummy_tokenizer", (a, b, c, d) -> new DummyTokenizerFactory());
+    }
+
+    @Override
+    public Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() {
+        return singletonMap("dummy", (a, b, c, d) -> new DummyAnalyzerProvider());
     }
 }
diff --git a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java
index 4312dd6105e..5e636bed939 100644
--- a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java
@@ -456,9 +456,9 @@ public class AnalyzeActionIT extends ESIntegTestCase {
                 .setAnalyzer("not_exist_analyzer")
                 .get();
             fail("shouldn't get here");
-        } catch (Throwable t) {
-            assertThat(t, instanceOf(IllegalArgumentException.class));
-            assertThat(t.getMessage(), startsWith("failed to find global analyzer"));
+        } catch (Exception e) {
+            assertThat(e, instanceOf(IllegalArgumentException.class));
+            assertThat(e.getMessage(), startsWith("failed to find global analyzer"));
         }

diff --git a/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java b/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceTests.java
similarity index 58%
rename from core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java
rename to core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceTests.java
index 84ce2c5da50..ba4467a5630 100644
--- a/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceTests.java
@@ -19,36 +19,30 @@
 package org.elasticsearch.indices.analyze;

 import org.apache.lucene.analysis.hunspell.Dictionary;
-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.indices.analysis.HunspellService;
-import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
-import org.elasticsearch.test.ESIntegTestCase.Scope;
-import org.hamcrest.Matchers;
+import org.elasticsearch.test.ESTestCase;

+import static java.util.Collections.emptyMap;
 import static org.elasticsearch.indices.analysis.HunspellService.HUNSPELL_IGNORE_CASE;
 import static org.elasticsearch.indices.analysis.HunspellService.HUNSPELL_LAZY_LOAD;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.hasToString;
 import static org.hamcrest.Matchers.notNullValue;

-/**
- *
- */
-@ClusterScope(scope= Scope.TEST, numDataNodes=0)
-public class HunspellServiceIT extends ESIntegTestCase {
+public class HunspellServiceTests extends ESTestCase {
     public void testLocaleDirectoryWithNodeLevelConfig() throws Exception {
         Settings settings = Settings.builder()
.put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/conf_dir")) .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean()) .put(HUNSPELL_IGNORE_CASE.getKey(), true) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); - internalCluster().startNode(settings); - Dictionary dictionary = internalCluster().getInstance(HunspellService.class).getDictionary("en_US"); + Dictionary dictionary = new HunspellService(settings, new Environment(settings), emptyMap()).getDictionary("en_US"); assertThat(dictionary, notNullValue()); - assertIgnoreCase(true, dictionary); + assertTrue(dictionary.getIgnoreCase()); } public void testLocaleDirectoryWithLocaleSpecificConfig() throws Exception { @@ -58,58 +52,42 @@ public class HunspellServiceIT extends ESIntegTestCase { .put(HUNSPELL_IGNORE_CASE.getKey(), true) .put("indices.analysis.hunspell.dictionary.en_US.strict_affix_parsing", false) .put("indices.analysis.hunspell.dictionary.en_US.ignore_case", false) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); - internalCluster().startNode(settings); - Dictionary dictionary = internalCluster().getInstance(HunspellService.class).getDictionary("en_US"); + Dictionary dictionary = new HunspellService(settings, new Environment(settings), emptyMap()).getDictionary("en_US"); assertThat(dictionary, notNullValue()); - assertIgnoreCase(false, dictionary); - - + assertFalse(dictionary.getIgnoreCase()); // testing that dictionary specific settings override node level settings - dictionary = internalCluster().getInstance(HunspellService.class).getDictionary("en_US_custom"); + dictionary = new HunspellService(settings, new Environment(settings), emptyMap()).getDictionary("en_US_custom"); assertThat(dictionary, notNullValue()); - assertIgnoreCase(true, dictionary); + assertTrue(dictionary.getIgnoreCase()); } public void testDicWithNoAff() throws Exception { Settings settings = Settings.builder() .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/no_aff_conf_dir")) .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); - Dictionary dictionary = null; - try { - internalCluster().startNode(settings); - dictionary = internalCluster().getInstance(HunspellService.class).getDictionary("en_US"); - fail("Missing affix file didn't throw an error"); - } - catch (Throwable t) { - assertNull(dictionary); - assertThat(ExceptionsHelper.unwrap(t, ElasticsearchException.class).toString(), Matchers.containsString("Missing affix file")); - } + IllegalStateException e = expectThrows(IllegalStateException.class, + () -> new HunspellService(settings, new Environment(settings), emptyMap()).getDictionary("en_US")); + assertEquals("failed to load hunspell dictionary for locale: en_US", e.getMessage()); + assertThat(e.getCause(), hasToString(containsString("Missing affix file"))); } public void testDicWithTwoAffs() throws Exception { Settings settings = Settings.builder() .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/two_aff_conf_dir")) .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); - Dictionary dictionary = null; - try { - internalCluster().startNode(settings); - dictionary = internalCluster().getInstance(HunspellService.class).getDictionary("en_US"); - fail("Multiple affix files didn't throw an error"); - } catch (Throwable t) { - assertNull(dictionary); - assertThat(ExceptionsHelper.unwrap(t, 
ElasticsearchException.class).toString(), Matchers.containsString("Too many affix files")); - } - } - - // TODO: on next upgrade of lucene, just use new getter - private void assertIgnoreCase(boolean expected, Dictionary dictionary) throws Exception { - // assertEquals(expected, dictionary.getIgnoreCase()); + IllegalStateException e = expectThrows(IllegalStateException.class, + () -> new HunspellService(settings, new Environment(settings), emptyMap()).getDictionary("en_US")); + assertEquals("failed to load hunspell dictionary for locale: en_US", e.getMessage()); + assertThat(e.getCause(), hasToString(containsString("Too many affix files"))); } } diff --git a/core/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java b/core/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java index 08ef691daa7..8325aea135a 100644 --- a/core/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java +++ b/core/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java @@ -206,7 +206,8 @@ public abstract class AbstractIndicesClusterStateServiceTestCase extends ESTestC } @Override - public @Nullable MockIndexService indexService(Index index) { + @Nullable + public MockIndexService indexService(Index index) { return indices.get(index.getUUID()); } diff --git a/core/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/core/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java index f446aa9a80a..9fd33819f38 100644 --- a/core/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java +++ b/core/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java @@ -160,7 +160,7 @@ class ClusterStateChanges { metaDataIndexUpgradeService, nodeServicesProvider, indicesService); MetaDataDeleteIndexService deleteIndexService = new MetaDataDeleteIndexService(settings, clusterService, allocationService); MetaDataUpdateSettingsService metaDataUpdateSettingsService = new MetaDataUpdateSettingsService(settings, clusterService, - allocationService, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, new IndexNameExpressionResolver(settings)); + allocationService, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, indicesService, nodeServicesProvider); MetaDataCreateIndexService createIndexService = new MetaDataCreateIndexService(settings, clusterService, indicesService, allocationService, new AliasValidator(settings), Collections.emptySet(), environment, nodeServicesProvider, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); diff --git a/core/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java b/core/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java index 6e3734f2708..e67d9eaf48a 100644 --- a/core/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java +++ b/core/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java @@ -37,7 +37,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation.FailedShard; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import 
org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.indices.recovery.RecoveryTargetService;
 import org.elasticsearch.repositories.RepositoriesService;
@@ -124,7 +124,7 @@ public class IndicesClusterStateServiceRandomUpdatesTests extends AbstractIndice
         for (Iterator<Map.Entry<DiscoveryNode, IndicesClusterStateService>> it = clusterStateServiceMap.entrySet().iterator(); it.hasNext(); ) {
             DiscoveryNode node = it.next().getKey();
-            if (state.nodes().nodeExists(node.getId()) == false) {
+            if (state.nodes().nodeExists(node) == false) {
                 it.remove();
             }
         }
@@ -255,7 +255,7 @@ public class IndicesClusterStateServiceRandomUpdatesTests extends AbstractIndice
         for (DiscoveryNode.Role mustHaveRole : mustHaveRoles) {
             roles.add(mustHaveRole);
         }
-        return new DiscoveryNode("node_" + randomAsciiOfLength(8), DummyTransportAddress.INSTANCE, Collections.emptyMap(), roles,
+        return new DiscoveryNode("node_" + randomAsciiOfLength(8), LocalTransportAddress.buildUnique(), Collections.emptyMap(), roles,
             Version.CURRENT);
     }
diff --git a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java
index 8c724efdfc7..5f7f26cd38c 100644
--- a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java
@@ -68,14 +68,14 @@ public class FlushIT extends ESIntegTestCase {
                         // don't use assertAllSuccessful it uses a randomized context that belongs to a different thread
                         assertThat("Unexpected ShardFailures: " + Arrays.toString(flushResponse.getShardFailures()), flushResponse.getFailedShards(), equalTo(0));
                         latch.countDown();
-                    } catch (Throwable ex) {
+                    } catch (Exception ex) {
                         onFailure(ex);
                     }
                 }

                 @Override
-                public void onFailure(Throwable e) {
+                public void onFailure(Exception e) {
                     errors.add(e);
                     latch.countDown();
                 }
diff --git a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java
index 5932434438c..ea2a80bada5 100644
--- a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java
@@ -18,6 +18,7 @@
  */
 package org.elasticsearch.indices.flush;

+import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
 import org.elasticsearch.cluster.routing.ShardRouting;
@@ -31,9 +32,11 @@ import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.shard.ShardNotFoundException;
 import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.elasticsearch.threadpool.ThreadPool;

 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ExecutionException;

 /**
  */
@@ -103,7 +106,7 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase {
         assertTrue(response.success());
     }

-    public void testSyncFailsIfOperationIsInFlight() throws InterruptedException {
+    public void testSyncFailsIfOperationIsInFlight() throws InterruptedException, ExecutionException {
         createIndex("test");
         client().prepareIndex("test", "test", "1").setSource("{}").get();
         IndexService test = getInstanceFromNode(IndicesService.class).indexService(resolveIndex("test"));
@@ -111,7 +114,9 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase {
         SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class);
         final ShardId shardId = shard.shardId();
-        try (Releasable operationLock = shard.acquirePrimaryOperationLock()) {
+        PlainActionFuture<Releasable> fut = new PlainActionFuture<>();
+        shard.acquirePrimaryOperationLock(fut, ThreadPool.Names.INDEX);
+        try (Releasable operationLock = fut.get()) {
             SyncedFlushUtil.LatchedListener<ShardsSyncedFlushResult> listener = new SyncedFlushUtil.LatchedListener<>();
             flushService.attemptSyncedFlush(shardId, listener);
             listener.latch.await();
diff --git a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java
index 485ec020c3f..b71ba63a157 100644
--- a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java
+++ b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java
@@ -57,7 +57,7 @@ public class SyncedFlushUtil {
     public static final class LatchedListener<T> implements ActionListener<T> {
         public volatile T result;
-        public volatile Throwable error;
+        public volatile Exception error;
         public final CountDownLatch latch = new CountDownLatch(1);

         @Override
@@ -67,7 +67,7 @@ public class SyncedFlushUtil {
         }

         @Override
-        public void onFailure(Throwable e) {
+        public void onFailure(Exception e) {
             error = e;
             latch.countDown();
         }
diff --git a/core/src/test/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java
index 2981f2d110c..eeaeb84d9a9 100644
--- a/core/src/test/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java
@@ -69,7 +69,7 @@ public class ConcurrentDynamicTemplateIT extends ESIntegTestCase {
             }

             @Override
-            public void onFailure(Throwable e) {
+            public void onFailure(Exception e) {
                 throwable.add(e);
                 latch.countDown();
             }
@@ -83,4 +83,4 @@ public class ConcurrentDynamicTemplateIT extends ESIntegTestCase {
         }
     }

-}
\ No newline at end of file
+}
diff --git a/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java
index 68a176e22c3..91fd7bb972b 100644
--- a/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java
@@ -43,6 +43,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CyclicBarrier;
 import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicReference;

 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_METADATA;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ;
@@ -258,7 +259,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase {
         // not all shards are allocated with the initial create index. Wait for it..
         ensureYellow();

-        final Throwable[] threadException = new Throwable[1];
+        final AtomicReference<Exception> threadException = new AtomicReference<>();
         final AtomicBoolean stop = new AtomicBoolean(false);
         Thread[] threads = new Thread[3];
         final CyclicBarrier barrier = new CyclicBarrier(threads.length);
@@ -298,8 +299,8 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase {
                         assertThat(mappings.containsKey(typeName), equalTo(true));
                         assertThat(((Map) mappings.get(typeName).getSourceAsMap().get("properties")).keySet(), Matchers.hasItem(fieldName));
                     }
-                } catch (Throwable t) {
-                    threadException[0] = t;
+                } catch (Exception e) {
+                    threadException.set(e);
                     stop.set(true);
                 }
             }
@@ -311,8 +312,8 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase {
         for (Thread t : threads)
             t.join();

-        if (threadException[0] != null) {
-            throw threadException[0];
+        if (threadException.get() != null) {
+            throw threadException.get();
         }
     }
diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java
index 0ae8f71c742..b448f35c21b 100644
--- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java
@@ -28,6 +28,8 @@ import org.elasticsearch.action.bulk.BulkResponse;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchRequestBuilder;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.search.ShardSearchFailure;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
@@ -62,7 +64,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures;
 import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.CoreMatchers.instanceOf;
-import static org.hamcrest.Matchers.endsWith;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.hamcrest.Matchers.startsWith;
@@ -70,7 +71,7 @@ import static org.hamcrest.Matchers.startsWith;
 /**
  * Integration tests for InternalCircuitBreakerService
 */
-@ClusterScope(scope = TEST, randomDynamicTemplates = false)
+@ClusterScope(scope = TEST, randomDynamicTemplates = false, numClientNodes = 0, maxNumDataNodes = 1)
 public class CircuitBreakerServiceIT extends ESIntegTestCase {
     /** Reset all breaker settings back to their defaults */
     private void reset() {
@@ -266,17 +267,26 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase {

         // Perform a search to load field data for the "test" field
         try {
-            client.prepareSearch("cb-test").setQuery(matchAllQuery()).addSort("test", SortOrder.DESC).get();
-            fail("should have thrown an exception");
+            SearchResponse searchResponse = client.prepareSearch("cb-test").setQuery(matchAllQuery()).addSort("test", SortOrder.DESC).get();
+            if (searchResponse.getShardFailures().length > 0) {
+                // each shard must have failed with CircuitBreakingException
+                for (ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) {
+                    Throwable cause = ExceptionsHelper.unwrap(shardSearchFailure.getCause(), CircuitBreakingException.class);
+ assertThat(cause, instanceOf(CircuitBreakingException.class)); + assertEquals(((CircuitBreakingException) cause).getByteLimit(), 500L); + } + } else { + fail("should have thrown a CircuitBreakingException"); + } } catch (Exception e) { - final Throwable cause = ExceptionsHelper.unwrap(e, CircuitBreakingException.class); - assertNotNull("CircuitBreakingException is not the cause of " + e, cause); - String errMsg = "would be larger than limit of [500/500b]]"; - assertThat("Exception: [" + cause.toString() + "] should contain a CircuitBreakingException", + Throwable cause = ExceptionsHelper.unwrap(e, CircuitBreakingException.class); + assertThat(cause, instanceOf(CircuitBreakingException.class)); + assertEquals(((CircuitBreakingException) cause).getByteLimit(), 500L); + assertThat("Exception: [" + cause.toString() + "] should be caused by the parent circuit breaker", cause.toString(), startsWith("CircuitBreakingException[[parent] Data too large")); - assertThat("Exception: [" + cause.toString() + "] should contain a CircuitBreakingException", - cause.toString(), endsWith(errMsg)); } + + reset(); } public void testRequestBreaker() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index 0d6d5122006..2c52cd33015 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -38,7 +38,7 @@ import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.lucene.store.IndexOutputOutputStream; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.DirectoryService; @@ -69,8 +69,8 @@ public class RecoverySourceHandlerTests extends ESTestCase { put("indices.recovery.concurrent_small_file_streams", 1).build(); final RecoverySettings recoverySettings = new RecoverySettings(settings, service); StartRecoveryRequest request = new StartRecoveryRequest(shardId, - new DiscoveryNode("b", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), - new DiscoveryNode("b", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), + new DiscoveryNode("b", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), + new DiscoveryNode("b", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), null, RecoveryState.Type.STORE, randomLong()); Store store = newStore(createTempDir()); RecoverySourceHandler handler = new RecoverySourceHandler(null, null, request, recoverySettings.getChunkSize().bytesAsInt(), @@ -119,8 +119,8 @@ public class RecoverySourceHandlerTests extends ESTestCase { put("indices.recovery.concurrent_small_file_streams", 1).build(); final RecoverySettings recoverySettings = new RecoverySettings(settings, service); StartRecoveryRequest request = new StartRecoveryRequest(shardId, - new DiscoveryNode("b", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), - new DiscoveryNode("b", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), + new DiscoveryNode("b", 
LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT),
+            new DiscoveryNode("b", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT),
             null, RecoveryState.Type.STORE, randomLong());
         Path tempDir = createTempDir();
         Store store = newStore(tempDir, false);
@@ -182,8 +182,8 @@ public class RecoverySourceHandlerTests extends ESTestCase {
             put("indices.recovery.concurrent_small_file_streams", 1).build();
         final RecoverySettings recoverySettings = new RecoverySettings(settings, service);
         StartRecoveryRequest request = new StartRecoveryRequest(shardId,
-            new DiscoveryNode("b", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT),
-            new DiscoveryNode("b", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT),
+            new DiscoveryNode("b", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT),
+            new DiscoveryNode("b", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT),
             null, RecoveryState.Type.STORE, randomLong());
         Path tempDir = createTempDir();
         Store store = newStore(tempDir, false);
diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryTargetTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryTargetTests.java
index 8552db2d376..d0401196b95 100644
--- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryTargetTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryTargetTests.java
@@ -24,7 +24,7 @@ import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.Streamable;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.indices.recovery.RecoveryState.File;
 import org.elasticsearch.indices.recovery.RecoveryState.Index;
@@ -57,9 +57,9 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo;
 public class RecoveryTargetTests extends ESTestCase {
     abstract class Streamer<T extends Streamable> extends Thread {
         private T lastRead;
-        final private AtomicBoolean shouldStop;
-        final private T source;
-        final AtomicReference<Throwable> error = new AtomicReference<>();
+        private final AtomicBoolean shouldStop;
+        private final T source;
+        final AtomicReference<Exception> error = new AtomicReference<>();
         final Version streamVersion;

         Streamer(AtomicBoolean shouldStop, T source) {
@@ -73,7 +73,7 @@
         }

         public T lastRead() throws Throwable {
-            Throwable t = error.get();
+            Exception t = error.get();
             if (t != null) {
                 throw t;
             }
@@ -84,7 +84,7 @@
             BytesStreamOutput out = new BytesStreamOutput();
             source.writeTo(out);
             out.close();
-            StreamInput in = StreamInput.wrap(out.bytes());
+            StreamInput in = out.bytes().streamInput();
             T obj = deserialize(in);
             lastRead = obj;
             return obj;
@@ -105,8 +105,8 @@
                     serializeDeserialize();
                 }
                 serializeDeserialize();
-            } catch (Throwable t) {
-                error.set(t);
+            } catch (Exception e) {
+                error.set(e);
             }
         }
     }
@@ -339,7 +339,8 @@
     }

     public void testStageSequenceEnforcement() {
-        final DiscoveryNode discoveryNode = new DiscoveryNode("1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT);
+        final DiscoveryNode discoveryNode = new DiscoveryNode("1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(),
+            Version.CURRENT);
         Stage[] stages = Stage.values();
         int i = randomIntBetween(0, stages.length - 1);
         int j;
diff --git a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java
index e7e3cb32226..2a67742fc68 100644
--- a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java
@@ -29,7 +29,9 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
 import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.Priority;
+import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.IndexModule;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.engine.VersionConflictEngineException;
 import org.elasticsearch.index.MergePolicyConfig;
@@ -37,9 +39,13 @@ import org.elasticsearch.index.MergeSchedulerConfig;
 import org.elasticsearch.index.store.IndexStore;
 import org.elasticsearch.index.store.Store;
 import org.elasticsearch.indices.IndicesService;
+import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;

 import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;

 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_METADATA;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ;
@@ -53,6 +59,42 @@ import static org.hamcrest.Matchers.nullValue;

 public class UpdateSettingsIT extends ESIntegTestCase {
+
+    public void testInvalidDynamicUpdate() {
+        createIndex("test");
+        IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () ->
+            client().admin().indices().prepareUpdateSettings("test")
+                .setSettings(Settings.builder()
+                    .put("index.dummy", "boom")
+                )
+                .execute().actionGet());
+        assertEquals(exception.getCause().getMessage(), "this setting goes boom");
+        IndexMetaData indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test");
+        assertNotEquals(indexMetaData.getSettings().get("index.dummy"), "invalid dynamic value");
+    }
+
+    @Override
+    protected Collection<Class<? extends Plugin>> nodePlugins() {
+        return pluginList(DummySettingPlugin.class);
+    }
+
+    public static class DummySettingPlugin extends Plugin {
+        public static final Setting<String> DUMMY_SETTING = Setting.simpleString("index.dummy",
+            Setting.Property.IndexScope, Setting.Property.Dynamic);
+        @Override
+        public void onIndexModule(IndexModule indexModule) {
+            indexModule.addSettingsUpdateConsumer(DUMMY_SETTING, (s) -> {}, (s) -> {
+                if (s.equals("boom"))
+                    throw new IllegalArgumentException("this setting goes boom");
+            });
+        }
+
+        @Override
+        public List<Setting<?>> getSettings() {
+            return Collections.singletonList(DUMMY_SETTING);
+        }
+    }
+
     public void testResetDefault() {
         createIndex("test");
diff --git a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java
index 81c50cc4f9c..2ad8ebb52f9 100644
--- a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java
+++ 
b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java @@ -42,7 +42,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.DiscoverySettings; @@ -126,7 +126,7 @@ public class RareClusterStateIT extends ESIntegTestCase { // inject a node ClusterState.Builder builder = ClusterState.builder(currentState); builder.nodes(DiscoveryNodes.builder(currentState.nodes()).put(new DiscoveryNode("_non_existent", - DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT))); + LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT))); // open index final IndexMetaData indexMetaData = IndexMetaData.builder(currentState.metaData().index(index)).state(IndexMetaData.State.OPEN).build(); @@ -145,7 +145,7 @@ public class RareClusterStateIT extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { } }); @@ -165,7 +165,7 @@ public class RareClusterStateIT extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { } }); @@ -260,7 +260,7 @@ public class RareClusterStateIT extends ESIntegTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { putMappingResponse.set(e); } }); @@ -292,7 +292,7 @@ public class RareClusterStateIT extends ESIntegTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { docIndexResponse.set(e); } }); @@ -376,7 +376,7 @@ public class RareClusterStateIT extends ESIntegTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { putMappingResponse.set(e); } }); @@ -403,7 +403,7 @@ public class RareClusterStateIT extends ESIntegTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { docIndexResponse.set(e); } }); diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index 6bce95af184..8e88aff523c 100644 --- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -647,7 +647,7 @@ public class IndexStatsIT extends ESIntegTestCase { flags.writeTo(out); out.close(); BytesReference bytes = out.bytes(); - CommonStatsFlags readStats = CommonStatsFlags.readCommonStatsFlags(StreamInput.wrap(bytes)); + CommonStatsFlags readStats = CommonStatsFlags.readCommonStatsFlags(bytes.streamInput()); for (Flag flag : values) { assertThat(flags.isSet(flag), equalTo(readStats.isSet(flag))); } @@ -661,7 +661,7 @@ public class IndexStatsIT extends ESIntegTestCase { flags.writeTo(out); out.close(); BytesReference bytes = out.bytes(); - CommonStatsFlags readStats = CommonStatsFlags.readCommonStatsFlags(StreamInput.wrap(bytes)); + CommonStatsFlags readStats = CommonStatsFlags.readCommonStatsFlags(bytes.streamInput()); for (Flag flag : values) { assertThat(flags.isSet(flag), 
equalTo(readStats.isSet(flag)));
         }
diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java
index b500ffb7dd2..ad26ec71226 100644
--- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java
@@ -263,7 +263,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase {
         assertThat(waitForShardDeletion(node_3, index, 0), equalTo(false));

         Path server2Shard = shardDirectory(node_2, index, 0);
-        logger.info("--> stopping node " + node_2);
+        logger.info("--> stopping node {}", node_2);
         internalCluster().stopRandomNode(InternalTestCluster.nameFilter(node_2));

         logger.info("--> running cluster_health");
@@ -426,7 +426,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase {
             }

             @Override
-            public void onFailure(String source, Throwable t) {
+            public void onFailure(String source, Exception e) {
             }
         });
         waitNoPendingTasksOnAll();
diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java
index e558f0f2a12..96af4ef3671 100644
--- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java
@@ -57,7 +57,7 @@ import static org.elasticsearch.test.VersionUtils.randomVersion;
 /**
 */
 public class IndicesStoreTests extends ESTestCase {
-    private final static ShardRoutingState[] NOT_STARTED_STATES;
+    private static final ShardRoutingState[] NOT_STARTED_STATES;

     static {
         Set<ShardRoutingState> set = new HashSet<>();
diff --git a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java
index acdfdd12266..a5ec8e4ecd7 100644
--- a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java
@@ -35,7 +35,6 @@ import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.indices.IndexTemplateAlreadyExistsException;
 import org.elasticsearch.indices.InvalidAliasNameException;
-import org.elasticsearch.indices.InvalidIndexNameException;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.test.ESIntegTestCase;
diff --git a/core/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java b/core/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java
index fa78d5aa16c..99c407c18b7 100644
--- a/core/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java
+++ b/core/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java
@@ -21,10 +21,8 @@ package org.elasticsearch.ingest;

 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.ingest.ProcessorsRegistry;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.threadpool.TestThreadPool;
 import org.junit.Before;

 import java.util.ArrayList;
@@ -98,7 +96,7 @@ public class ConfigurationUtilsTests extends ESTestCase {
     public void testReadProcessors() throws Exception {
         Processor processor = mock(Processor.class);
         ProcessorsRegistry.Builder builder = new ProcessorsRegistry.Builder();
-        builder.registerProcessor("test_processor", (registry) -> config -> processor);
+        builder.registerProcessor("test_processor", (registry) -> (tag, config) -> processor);
         ProcessorsRegistry registry = builder.build(mock(ScriptService.class), mock(ClusterService.class));
diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestCloseIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestCloseIT.java
deleted file mode 100644
index ed0a302cf73..00000000000
--- a/core/src/test/java/org/elasticsearch/ingest/IngestCloseIT.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.ingest;
-
-import org.elasticsearch.node.NodeModule;
-import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.test.ESSingleNodeTestCase;
-
-import java.io.Closeable;
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-import static org.hamcrest.Matchers.is;
-
-public class IngestCloseIT extends ESSingleNodeTestCase {
-
-    @Override
-    protected Collection<Class<? extends Plugin>> getPlugins() {
-        return pluginList(IngestPlugin.class);
-    }
-
-    private static AtomicBoolean called = new AtomicBoolean(false);
-
-    public void testCloseNode() throws Exception {
-        // We manually stop the node and check we called
-        stopNode();
-
-        assertThat(called.get(), is(true));
-
-        // We need to restart the node for the next tests (and because tearDown() expects a Node)
-        startNode();
-    }
-
-    public static class IngestPlugin extends Plugin {
-        public void onModule(NodeModule nodeModule) {
-            nodeModule.registerProcessor("test", (registry) -> new Factory());
-        }
-    }
-
-    public static final class Factory extends AbstractProcessorFactory<TestProcessor> implements Closeable {
-        @Override
-        protected TestProcessor doCreate(String tag, Map<String, Object> config) throws Exception {
-            return new TestProcessor("id", "test", ingestDocument -> {
-                throw new UnsupportedOperationException("this code is actually never called from the test");
-            });
-        }
-
-        @Override
-        public void close() throws IOException {
-            called.set(true);
-        }
-    }
-
-}
diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java b/core/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java
index 119e94580ad..9974dd568a8 100644
--- a/core/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java
+++ b/core/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java
@@ -62,7 +62,7 @@ public class IngestStatsTests extends ESTestCase {
     private IngestStats serialize(IngestStats stats) throws IOException {
         BytesStreamOutput out = new BytesStreamOutput();
         stats.writeTo(out);
-        StreamInput in = StreamInput.wrap(out.bytes());
+        StreamInput in = out.bytes().streamInput();
         return new IngestStats(in);
     }
 }
diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java
index d001a829443..53964132abe 100644
--- a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java
@@ -74,7 +74,7 @@ public class PipelineExecutionServiceTests extends ESTestCase {
     public void testExecuteIndexPipelineDoesNotExist() {
         IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
         @SuppressWarnings("unchecked")
-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         @SuppressWarnings("unchecked")
         Consumer<Boolean> completionHandler = mock(Consumer.class);
         try {
@@ -83,7 +83,7 @@
         } catch (IllegalArgumentException e) {
             assertThat(e.getMessage(), equalTo("pipeline with id [_id] does not exist"));
         }
-        verify(failureHandler, never()).accept(any(Throwable.class));
+        verify(failureHandler, never()).accept(any(Exception.class));
         verify(completionHandler, never()).accept(anyBoolean());
     }
@@ -98,9 +98,9 @@
             new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("does_not_exist");
         bulkRequest.add(indexRequest2);
         @SuppressWarnings("unchecked")
-        BiConsumer<IndexRequest, Throwable> failureHandler = mock(BiConsumer.class);
+        BiConsumer<IndexRequest, Exception> failureHandler = mock(BiConsumer.class);
         @SuppressWarnings("unchecked")
-        Consumer<Throwable> completionHandler = mock(Consumer.class);
+        Consumer<Exception> completionHandler = mock(Consumer.class);
         executionService.executeBulkRequest(bulkRequest.requests(), failureHandler, completionHandler);
         verify(failureHandler, times(1)).accept(
             argThat(new CustomTypeSafeMatcher<IndexRequest>("failure handler was not called with the expected arguments") {
@@ -126,7 +126,7 @@

         IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
         @SuppressWarnings("unchecked")
-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         @SuppressWarnings("unchecked")
         Consumer<Boolean> completionHandler = mock(Consumer.class);
         executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler);
@@ -134,13 +134,30 @@
         verify(completionHandler, times(1)).accept(true);
     }

+    public void testExecuteEmptyPipeline() throws Exception {
+        CompoundProcessor processor = mock(CompoundProcessor.class);
+        when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor));
+        when(processor.getProcessors()).thenReturn(Collections.emptyList());
+
+        IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
+        @SuppressWarnings("unchecked")
+        Consumer<Exception> failureHandler = mock(Consumer.class);
+        @SuppressWarnings("unchecked")
+        Consumer<Boolean> completionHandler = mock(Consumer.class);
+        executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler);
+        verify(processor, never()).execute(any());
+        verify(failureHandler, never()).accept(any());
+        verify(completionHandler, times(1)).accept(true);
+    }
+
     public void testExecutePropagateAllMetaDataUpdates() throws Exception {
         CompoundProcessor processor = mock(CompoundProcessor.class);
+        when(processor.getProcessors()).thenReturn(Collections.singletonList(mock(Processor.class)));
         doAnswer((InvocationOnMock invocationOnMock) -> {
             IngestDocument ingestDocument = (IngestDocument) invocationOnMock.getArguments()[0];
             for (IngestDocument.MetaData metaData : IngestDocument.MetaData.values()) {
                 if (metaData == IngestDocument.MetaData.TTL) {
-                    ingestDocument.setFieldValue(IngestDocument.MetaData.TTL.getFieldName(), "5w");
+                    ingestDocument.setFieldValue(IngestDocument.MetaData.TTL.getFieldName(), "35d");
                 } else {
                     ingestDocument.setFieldValue(metaData.getFieldName(), "update" + metaData.getFieldName());
                 }
@@ -152,7 +169,7 @@

         IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
         @SuppressWarnings("unchecked")
-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         @SuppressWarnings("unchecked")
         Consumer<Boolean> completionHandler = mock(Consumer.class);
         executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler);
@@ -171,11 +188,12 @@

     public void testExecuteFailure() throws Exception {
         CompoundProcessor processor = mock(CompoundProcessor.class);
+        when(processor.getProcessors()).thenReturn(Collections.singletonList(mock(Processor.class)));
         when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor));
         IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
         doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
         @SuppressWarnings("unchecked")
-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         @SuppressWarnings("unchecked")
         Consumer<Boolean> completionHandler = mock(Consumer.class);
         executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler);
@@ -195,7 +213,7 @@
         IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
         doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
         @SuppressWarnings("unchecked")
-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         @SuppressWarnings("unchecked")
         Consumer<Boolean> completionHandler = mock(Consumer.class);
         executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler);
@@ -213,7 +231,7 @@
         doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
         doThrow(new RuntimeException()).when(onFailureProcessor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
         @SuppressWarnings("unchecked")
-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         @SuppressWarnings("unchecked")
         Consumer<Boolean> completionHandler = mock(Consumer.class);
         executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler);
@@ -235,7 +253,7 @@
         doThrow(new RuntimeException()).when(onFailureProcessor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
         doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
         @SuppressWarnings("unchecked")
-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         @SuppressWarnings("unchecked")
         Consumer<Boolean> completionHandler = mock(Consumer.class);
         executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler);
@@ -250,7 +268,7 @@

         IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
         @SuppressWarnings("unchecked")
-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         @SuppressWarnings("unchecked")
         Consumer<Boolean> completionHandler = mock(Consumer.class);
         executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler);
@@ -266,7 +284,7 @@

         IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
         @SuppressWarnings("unchecked")
-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         @SuppressWarnings("unchecked")
         Consumer<Boolean> completionHandler = mock(Consumer.class);
         executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler);
@@ -280,7 +298,7 @@
         IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline("_id")
                 .source(Collections.emptyMap())
                 .ttl(1000L);
-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         Consumer<Boolean> completionHandler = mock(Consumer.class);
         executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler);
@@ -313,12 +331,13 @@
         }

         CompoundProcessor processor = mock(CompoundProcessor.class);
+        when(processor.getProcessors()).thenReturn(Collections.singletonList(mock(Processor.class)));
         Exception error = new RuntimeException();
         doThrow(error).when(processor).execute(any());
         when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, processor));

-        BiConsumer<IndexRequest, Throwable> requestItemErrorHandler = mock(BiConsumer.class);
-        Consumer<Throwable> completionHandler = mock(Consumer.class);
+        BiConsumer<IndexRequest, Exception> requestItemErrorHandler = mock(BiConsumer.class);
+        Consumer<Exception> completionHandler = mock(Consumer.class);
         executionService.executeBulkRequest(bulkRequest.requests(), requestItemErrorHandler, completionHandler);

         verify(requestItemErrorHandler, times(numIndexRequests)).accept(any(IndexRequest.class), eq(error));
@@ -339,9 +358,9 @@
         when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, new CompoundProcessor()));

         @SuppressWarnings("unchecked")
-        BiConsumer<IndexRequest, Throwable> requestItemErrorHandler = mock(BiConsumer.class);
+        BiConsumer<IndexRequest, Exception> requestItemErrorHandler = mock(BiConsumer.class);
         @SuppressWarnings("unchecked")
-        Consumer<Throwable> completionHandler = mock(Consumer.class);
+        Consumer<Exception> completionHandler = mock(Consumer.class);
         executionService.executeBulkRequest(bulkRequest.requests(), requestItemErrorHandler, completionHandler);

         verify(requestItemErrorHandler, never()).accept(any(), any());
@@ -356,15 +375,15 @@
         assertThat(ingestStats.getTotalStats().getIngestFailedCount(), equalTo(0L));
         assertThat(ingestStats.getTotalStats().getIngestTimeInMillis(), equalTo(0L));

-        when(store.get("_id1")).thenReturn(new Pipeline("_id1", null, new CompoundProcessor()));
-        when(store.get("_id2")).thenReturn(new Pipeline("_id2", null, new CompoundProcessor()));
+        when(store.get("_id1")).thenReturn(new Pipeline("_id1", null, new CompoundProcessor(mock(Processor.class))));
+        when(store.get("_id2")).thenReturn(new Pipeline("_id2", null, new CompoundProcessor(mock(Processor.class))));
         Map<String, PipelineConfiguration> configurationMap = new HashMap<>();
         configurationMap.put("_id1", new PipelineConfiguration("_id1", new BytesArray("{}")));
         configurationMap.put("_id2", new PipelineConfiguration("_id2", new BytesArray("{}")));
         executionService.updatePipelineStats(new IngestMetadata(configurationMap));

-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         Consumer<Boolean> completionHandler = mock(Consumer.class);

         IndexRequest indexRequest = new IndexRequest("_index");
diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java
index cb7bd849a47..ced25419d92 100644
--- a/core/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java
@@ -31,6 +31,7 @@ import java.util.List;
 import java.util.Map;

 import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.nullValue;
 import static org.mockito.Mockito.mock;
@@ -40,7 +41,7 @@ public class PipelineFactoryTests extends ESTestCase {
     public void testCreate() throws Exception {
         Map<String, Object> processorConfig0 = new HashMap<>();
         Map<String, Object> processorConfig1 = new HashMap<>();
-        processorConfig0.put(AbstractProcessorFactory.TAG_KEY, "first-processor");
+        processorConfig0.put(ConfigurationUtils.TAG_KEY, "first-processor");
         Map<String, Object> pipelineConfig = new HashMap<>();
         pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description");
         pipelineConfig.put(Pipeline.PROCESSORS_KEY,
@@ -69,6 +70,17 @@ public class PipelineFactoryTests extends ESTestCase {
         }
     }

+    public void testCreateWithEmptyProcessorsField() throws Exception {
+        Map<String, Object> pipelineConfig = new HashMap<>();
+        pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description");
+        pipelineConfig.put(Pipeline.PROCESSORS_KEY, Collections.emptyList());
+        Pipeline.Factory factory = new Pipeline.Factory();
+        Pipeline pipeline = factory.create("_id", pipelineConfig, null);
+        assertThat(pipeline.getId(), equalTo("_id"));
+        assertThat(pipeline.getDescription(), equalTo("_description"));
+        assertThat(pipeline.getProcessors(), is(empty()));
+    }
+
     public void testCreateWithPipelineOnFailure() throws Exception {
         Map<String, Object> processorConfig = new HashMap<>();
         Map<String, Object> pipelineConfig = new HashMap<>();
diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java
index 55ea4360ece..600e6dbb51e 100644
--- a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java
+++ b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java
@@ -58,7 +58,7 @@ public class PipelineStoreTests extends ESTestCase {
     public void init() throws Exception {
         store = new PipelineStore(Settings.EMPTY);
         ProcessorsRegistry.Builder registryBuilder = new ProcessorsRegistry.Builder();
-        registryBuilder.registerProcessor("set", (registry) -> config -> {
+        registryBuilder.registerProcessor("set", (registry) ->
(tag, config) -> { String field = (String) config.remove("field"); String value = (String) config.remove("value"); return new Processor() { @@ -78,7 +78,7 @@ public class PipelineStoreTests extends ESTestCase { } }; }); - registryBuilder.registerProcessor("remove", (registry) -> config -> { + registryBuilder.registerProcessor("remove", (registry) -> (tag, config) -> { String field = (String) config.remove("field"); return new Processor() { @Override @@ -259,7 +259,7 @@ public class PipelineStoreTests extends ESTestCase { store.validatePipeline(ingestInfos, putRequest); fail("exception expected"); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("Processor type [remove] is not installed on node [{_node_id2}{local}{local[_id]}]")); + assertThat(e.getMessage(), equalTo("Processor type [remove] is not installed on node [" + node2 + "]")); } ingestInfos.put(node2, new IngestInfo(Arrays.asList(new ProcessorInfo("set"), new ProcessorInfo("remove")))); diff --git a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceSettingsTests.java b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceSettingsTests.java index a1f4d381911..94a56d11933 100644 --- a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceSettingsTests.java @@ -54,9 +54,9 @@ public class JvmGcMonitorServiceSettingsTests extends ESTestCase { public void testNegativeSetting() throws InterruptedException { String collector = randomAsciiOfLength(5); Settings settings = Settings.builder().put("monitor.jvm.gc.collector." + collector + ".warn", "-" + randomTimeValue()).build(); - execute(settings, (command, interval) -> null, t -> { - assertThat(t, instanceOf(IllegalArgumentException.class)); - assertThat(t.getMessage(), allOf(containsString("invalid gc_threshold"), containsString("for [monitor.jvm.gc.collector." + collector + "."))); + execute(settings, (command, interval) -> null, e -> { + assertThat(e, instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), allOf(containsString("invalid gc_threshold"), containsString("for [monitor.jvm.gc.collector." + collector + "."))); }, true, null); } @@ -74,9 +74,9 @@ public class JvmGcMonitorServiceSettingsTests extends ESTestCase { } // we should get an exception that a setting is missing - execute(builder.build(), (command, interval) -> null, t -> { - assertThat(t, instanceOf(IllegalArgumentException.class)); - assertThat(t.getMessage(), containsString("missing gc_threshold for [monitor.jvm.gc.collector." + collector + ".")); + execute(builder.build(), (command, interval) -> null, e -> { + assertThat(e, instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), containsString("missing gc_threshold for [monitor.jvm.gc.collector." + collector + ".")); }, true, null); } @@ -84,18 +84,18 @@ public class JvmGcMonitorServiceSettingsTests extends ESTestCase { for (final String threshold : new String[] { "warn", "info", "debug" }) { final Settings.Builder builder = Settings.builder(); builder.put("monitor.jvm.gc.overhead." + threshold, randomIntBetween(Integer.MIN_VALUE, -1)); - execute(builder.build(), (command, interval) -> null, t -> { - assertThat(t, instanceOf(IllegalArgumentException.class)); - assertThat(t.getMessage(), containsString("setting [monitor.jvm.gc.overhead." 
+ threshold + "] must be >= 0")); + execute(builder.build(), (command, interval) -> null, e -> { + assertThat(e, instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), containsString("setting [monitor.jvm.gc.overhead." + threshold + "] must be >= 0")); }, true, null); } for (final String threshold : new String[] { "warn", "info", "debug" }) { final Settings.Builder builder = Settings.builder(); builder.put("monitor.jvm.gc.overhead." + threshold, randomIntBetween(100 + 1, Integer.MAX_VALUE)); - execute(builder.build(), (command, interval) -> null, t -> { - assertThat(t, instanceOf(IllegalArgumentException.class)); - assertThat(t.getMessage(), containsString("setting [monitor.jvm.gc.overhead." + threshold + "] must be <= 100")); + execute(builder.build(), (command, interval) -> null, e -> { + assertThat(e, instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), containsString("setting [monitor.jvm.gc.overhead." + threshold + "] must be <= 100")); }, true, null); } @@ -104,9 +104,9 @@ public class JvmGcMonitorServiceSettingsTests extends ESTestCase { infoWarnOutOfOrderBuilder.put("monitor.jvm.gc.overhead.info", info); final int warn = randomIntBetween(1, info - 1); infoWarnOutOfOrderBuilder.put("monitor.jvm.gc.overhead.warn", warn); - execute(infoWarnOutOfOrderBuilder.build(), (command, interval) -> null, t -> { - assertThat(t, instanceOf(IllegalArgumentException.class)); - assertThat(t.getMessage(), containsString("[monitor.jvm.gc.overhead.warn] must be greater than [monitor.jvm.gc.overhead.info] [" + info + "] but was [" + warn + "]")); + execute(infoWarnOutOfOrderBuilder.build(), (command, interval) -> null, e -> { + assertThat(e, instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), containsString("[monitor.jvm.gc.overhead.warn] must be greater than [monitor.jvm.gc.overhead.info] [" + info + "] but was [" + warn + "]")); }, true, null); final Settings.Builder debugInfoOutOfOrderBuilder = Settings.builder(); @@ -114,9 +114,9 @@ public class JvmGcMonitorServiceSettingsTests extends ESTestCase { final int debug = randomIntBetween(info + 1, 99); debugInfoOutOfOrderBuilder.put("monitor.jvm.gc.overhead.debug", debug); debugInfoOutOfOrderBuilder.put("monitor.jvm.gc.overhead.warn", randomIntBetween(debug + 1, 100)); // or the test will fail for the wrong reason - execute(debugInfoOutOfOrderBuilder.build(), (command, interval) -> null, t -> { - assertThat(t, instanceOf(IllegalArgumentException.class)); - assertThat(t.getMessage(), containsString("[monitor.jvm.gc.overhead.info] must be greater than [monitor.jvm.gc.overhead.debug] [" + debug + "] but was [" + info + "]")); + execute(debugInfoOutOfOrderBuilder.build(), (command, interval) -> null, e -> { + assertThat(e, instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), containsString("[monitor.jvm.gc.overhead.info] must be greater than [monitor.jvm.gc.overhead.debug] [" + debug + "] but was [" + info + "]")); }, true, null); } @@ -124,7 +124,7 @@ public class JvmGcMonitorServiceSettingsTests extends ESTestCase { execute(settings, scheduler, null, false, asserts); } - private static void execute(Settings settings, BiFunction> scheduler, Consumer consumer, boolean constructionShouldFail, Runnable asserts) throws InterruptedException { + private static void execute(Settings settings, BiFunction> scheduler, Consumer consumer, boolean constructionShouldFail, Runnable asserts) throws InterruptedException { assert constructionShouldFail == (consumer != null); assert 
diff --git a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmMonitorTests.java b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmMonitorTests.java
index 91862e9cd18..278a47ed21f 100644
--- a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmMonitorTests.java
+++ b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmMonitorTests.java
@@ -48,10 +48,10 @@ public class JvmMonitorTests extends ESTestCase {
         AtomicBoolean invoked = new AtomicBoolean();
         JvmGcMonitorService.JvmMonitor monitor = new JvmGcMonitorService.JvmMonitor(Collections.emptyMap(), IGNORE) {
             @Override
-            void onMonitorFailure(Throwable t) {
+            void onMonitorFailure(Exception e) {
                 invoked.set(true);
-                assertThat(t, instanceOf(RuntimeException.class));
-                assertThat(t, hasToString(containsString("simulated")));
+                assertThat(e, instanceOf(RuntimeException.class));
+                assertThat(e, hasToString(containsString("simulated")));
             }
 
             @Override
@@ -174,7 +174,7 @@ public class JvmMonitorTests extends ESTestCase {
         JvmGcMonitorService.JvmMonitor monitor = new JvmGcMonitorService.JvmMonitor(gcThresholds, IGNORE) {
 
             @Override
-            void onMonitorFailure(Throwable t) {
+            void onMonitorFailure(Exception e) {
             }
 
             @Override
@@ -284,7 +284,7 @@ public class JvmMonitorTests extends ESTestCase {
         final JvmGcMonitorService.JvmMonitor monitor = new JvmGcMonitorService.JvmMonitor(Collections.emptyMap(), IGNORE) {
 
             @Override
-            void onMonitorFailure(Throwable t) {
+            void onMonitorFailure(Exception e) {
             }
 
             @Override
@@ -358,7 +358,7 @@ public class JvmMonitorTests extends ESTestCase {
         final JvmGcMonitorService.JvmMonitor monitor = new JvmGcMonitorService.JvmMonitor(Collections.emptyMap(), gcOverheadThreshold) {
             @Override
-            void onMonitorFailure(final Throwable t) {
+            void onMonitorFailure(final Exception e) {
             }
 
             @Override
diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java
index 0d07bcf0981..090517adfcd 100644
--- a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java
+++ b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java
@@ -28,7 +28,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.transport.BoundTransportAddress;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.xcontent.ToXContent;
@@ -70,7 +70,7 @@ public class NodeInfoStreamingTests extends ESTestCase {
             out.setVersion(version);
             nodeInfo.writeTo(out);
             out.close();
-            StreamInput in = StreamInput.wrap(out.bytes());
+            StreamInput in = out.bytes().streamInput();
             in.setVersion(version);
             NodeInfo readNodeInfo = NodeInfo.readNodeInfo(in);
             assertExpectedUnchanged(nodeInfo, readNodeInfo);
@@ -81,11 +81,6 @@ public class NodeInfoStreamingTests extends ESTestCase {
         assertThat(nodeInfo.getBuild().toString(), equalTo(readNodeInfo.getBuild().toString()));
         assertThat(nodeInfo.getHostname(),
equalTo(readNodeInfo.getHostname())); assertThat(nodeInfo.getVersion(), equalTo(readNodeInfo.getVersion())); - assertThat(nodeInfo.getServiceAttributes().size(), equalTo(readNodeInfo.getServiceAttributes().size())); - for (Map.Entry entry : nodeInfo.getServiceAttributes().entrySet()) { - assertNotNull(readNodeInfo.getServiceAttributes().get(entry.getKey())); - assertThat(readNodeInfo.getServiceAttributes().get(entry.getKey()), equalTo(entry.getValue())); - } compareJsonOutput(nodeInfo.getHttp(), readNodeInfo.getHttp()); compareJsonOutput(nodeInfo.getJvm(), readNodeInfo.getJvm()); compareJsonOutput(nodeInfo.getProcess(), readNodeInfo.getProcess()); @@ -122,7 +117,7 @@ public class NodeInfoStreamingTests extends ESTestCase { private NodeInfo createNodeInfo() { Build build = Build.CURRENT; - DiscoveryNode node = new DiscoveryNode("test_node", DummyTransportAddress.INSTANCE, + DiscoveryNode node = new DiscoveryNode("test_node", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), VersionUtils.randomVersion(random())); Map serviceAttributes = new HashMap<>(); serviceAttributes.put("test", "attribute"); @@ -134,7 +129,7 @@ public class NodeInfoStreamingTests extends ESTestCase { threadPoolInfos.add(new ThreadPool.Info("test_threadpool", ThreadPool.ThreadPoolType.FIXED, 5)); ThreadPoolInfo threadPoolInfo = new ThreadPoolInfo(threadPoolInfos); Map profileAddresses = new HashMap<>(); - BoundTransportAddress dummyBoundTransportAddress = new BoundTransportAddress(new TransportAddress[]{DummyTransportAddress.INSTANCE}, DummyTransportAddress.INSTANCE); + BoundTransportAddress dummyBoundTransportAddress = new BoundTransportAddress(new TransportAddress[]{LocalTransportAddress.buildUnique()}, LocalTransportAddress.buildUnique()); profileAddresses.put("test_address", dummyBoundTransportAddress); TransportInfo transport = new TransportInfo(dummyBoundTransportAddress, profileAddresses); HttpInfo htttpInfo = new HttpInfo(dummyBoundTransportAddress, randomLong()); @@ -149,6 +144,7 @@ public class NodeInfoStreamingTests extends ESTestCase { // pick a random long that sometimes exceeds an int: indexingBuffer = new ByteSizeValue(random().nextLong() & ((1L<<40)-1)); } - return new NodeInfo(VersionUtils.randomVersion(random()), build, node, serviceAttributes, settings, osInfo, process, jvm, threadPoolInfo, transport, htttpInfo, plugins, ingestInfo, indexingBuffer); + return new NodeInfo(VersionUtils.randomVersion(random()), build, node, settings, osInfo, process, jvm, + threadPoolInfo, transport, htttpInfo, plugins, ingestInfo, indexingBuffer); } } diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java b/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java index 0cc30f8d569..0916cad60d5 100644 --- a/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java +++ b/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java @@ -24,9 +24,9 @@ import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.elasticsearch.test.ESIntegTestCase; import java.util.List; diff --git a/core/src/test/java/org/elasticsearch/plugins/ProgressInputStreamTests.java 
b/core/src/test/java/org/elasticsearch/plugins/ProgressInputStreamTests.java
new file mode 100644
index 00000000000..813921963c0
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/plugins/ProgressInputStreamTests.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.plugins;
+
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.hamcrest.Matchers.hasItem;
+import static org.hamcrest.Matchers.hasItems;
+import static org.hamcrest.Matchers.hasSize;
+
+public class ProgressInputStreamTests extends ESTestCase {
+
+    private List<Integer> progresses = new ArrayList<>();
+
+    public void testThatProgressListenerIsCalled() throws Exception {
+        ProgressInputStream is = newProgressInputStream(0);
+        is.checkProgress(-1);
+
+        assertThat(progresses, hasSize(1));
+        assertThat(progresses, hasItems(100));
+    }
+
+    public void testThatProgressListenerIsCalledOnUnexpectedCompletion() throws Exception {
+        ProgressInputStream is = newProgressInputStream(2);
+        is.checkProgress(-1);
+        assertThat(progresses, hasItems(100));
+    }
+
+    public void testThatProgressListenerReturnsMaxValueOnWrongExpectedSize() throws Exception {
+        ProgressInputStream is = newProgressInputStream(2);
+
+        is.checkProgress(1);
+        assertThat(progresses, hasItems(50));
+
+        is.checkProgress(3);
+        assertThat(progresses, hasItems(50, 99));
+
+        is.checkProgress(-1);
+        assertThat(progresses, hasItems(50, 99, 100));
+    }
+
+    public void testOneByte() throws Exception {
+        ProgressInputStream is = newProgressInputStream(1);
+        is.checkProgress(1);
+        is.checkProgress(-1);
+
+        assertThat(progresses, hasItems(99, 100));
+
+    }
+
+    public void testOddBytes() throws Exception {
+        int odd = randomIntBetween(10, 100) * 2 + 1;
+        ProgressInputStream is = newProgressInputStream(odd);
+        for (int i = 0; i < odd; i++) {
+            is.checkProgress(1);
+        }
+        is.checkProgress(-1);
+
+        assertThat(progresses, hasSize(Math.min(odd + 1, 100)));
+        assertThat(progresses, hasItem(100));
+    }
+
+    public void testEvenBytes() throws Exception {
+        int even = randomIntBetween(10, 100) * 2;
+        ProgressInputStream is = newProgressInputStream(even);
+
+        for (int i = 0; i < even; i++) {
+            is.checkProgress(1);
+        }
+        is.checkProgress(-1);
+
+        assertThat(progresses, hasSize(Math.min(even + 1, 100)));
+        assertThat(progresses, hasItem(100));
+    }
+
+    public void testOnProgressCannotBeCalledMoreThanOncePerPercent() throws Exception {
+        int count = randomIntBetween(150, 300);
+        ProgressInputStream is = newProgressInputStream(count);
+
+        for (int i = 0; i < count; i++) {
+            is.checkProgress(1);
+        }
+        is.checkProgress(-1);
+
+        assertThat(progresses, hasSize(100));
+    }
+
+    private ProgressInputStream newProgressInputStream(int expectedSize) {
+        return new ProgressInputStream(null, expectedSize) {
+            @Override
+            public void onProgress(int percent) {
+                progresses.add(percent);
+            }
+        };
+    }
+}
\ No newline at end of file
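Aside: taken together, these tests pin down ProgressInputStream's contract — checkProgress(bytesRead) maps cumulative progress onto at most one onProgress callback per percent, capping at 99 until checkProgress(-1) marks completion and forces 100. A sketch of typical wiring, assuming the class wraps a delegate stream and its read() feeds checkProgress internally (the tests only call checkProgress directly, so that wiring, the byte-array source, and the println sink are assumptions):

    byte[] payload = new byte[1000];
    try (InputStream in = new ProgressInputStream(new ByteArrayInputStream(payload), payload.length) {
        @Override
        public void onProgress(int percent) {
            System.out.println("read " + percent + "%"); // fires at most once per percent
        }
    }) {
        while (in.read() != -1) {
            // consuming the stream drives the callbacks
        }
    }
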
diff --git a/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderPlugin.java b/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderPlugin.java
index 701d1154587..9dfd5b6a93a 100644
--- a/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderPlugin.java
+++ b/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderPlugin.java
@@ -19,12 +19,17 @@
 
 package org.elasticsearch.plugins.responseheader;
 
-import org.elasticsearch.common.network.NetworkModule;
+import org.elasticsearch.plugins.ActionPlugin;
 import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.rest.RestHandler;
 
-public class TestResponseHeaderPlugin extends Plugin {
+import java.util.List;
 
-    public void onModule(NetworkModule module) {
-        module.registerRestHandler(TestResponseHeaderRestAction.class);
+import static java.util.Collections.singletonList;
+
+public class TestResponseHeaderPlugin extends Plugin implements ActionPlugin {
+    @Override
+    public List<Class<? extends RestHandler>> getRestHandlers() {
+        return singletonList(TestResponseHeaderRestAction.class);
     }
 }
diff --git a/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java b/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java
index 39432bd01ea..499b6fadc93 100644
--- a/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java
+++ b/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java
@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.plugins.responseheader;
 
-import org.elasticsearch.client.Client;
+import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.rest.BaseRestHandler;
@@ -32,13 +32,13 @@ import org.elasticsearch.rest.RestStatus;
 public class TestResponseHeaderRestAction extends BaseRestHandler {
 
     @Inject
-    public TestResponseHeaderRestAction(Settings settings, RestController controller, Client client) {
-        super(settings, client);
+    public TestResponseHeaderRestAction(Settings settings, RestController controller) {
+        super(settings);
         controller.registerHandler(RestRequest.Method.GET, "/_protected", this);
     }
 
     @Override
-    public void handleRequest(RestRequest request, RestChannel channel, Client client) {
+    public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) {
         if ("password".equals(request.header("Secret"))) {
             RestResponse response = new BytesRestResponse(RestStatus.OK, "Access granted");
             response.addHeader("Secret", "granted");
diff --git a/core/src/test/java/org/elasticsearch/recovery/RecoveriesCollectionTests.java b/core/src/test/java/org/elasticsearch/recovery/RecoveriesCollectionTests.java
index 2564b31488b..d56e1341165 100644
--- a/core/src/test/java/org/elasticsearch/recovery/RecoveriesCollectionTests.java
+++ b/core/src/test/java/org/elasticsearch/recovery/RecoveriesCollectionTests.java
@@ -22,7 +22,7 @@ import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import
org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; @@ -30,16 +30,13 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.recovery.RecoveriesCollection; import org.elasticsearch.indices.recovery.RecoveryFailedException; import org.elasticsearch.indices.recovery.RecoveryState; -import org.elasticsearch.indices.recovery.RecoveryTarget; import org.elasticsearch.indices.recovery.RecoveryTargetService; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.threadpool.ThreadPool; -import java.util.ArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Predicate; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; @@ -47,7 +44,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.lessThan; public class RecoveriesCollectionTests extends ESSingleNodeTestCase { - final static RecoveryTargetService.RecoveryListener listener = new RecoveryTargetService.RecoveryListener() { + static final RecoveryTargetService.RecoveryListener listener = new RecoveryTargetService.RecoveryListener() { @Override public void onRecoveryDone(RecoveryState state) { @@ -135,7 +132,8 @@ public class RecoveriesCollectionTests extends ESSingleNodeTestCase { long startRecovery(RecoveriesCollection collection, RecoveryTargetService.RecoveryListener listener, TimeValue timeValue) { IndicesService indexServices = getInstanceFromNode(IndicesService.class); IndexShard indexShard = indexServices.indexServiceSafe(resolveIndex("test")).getShardOrNull(0); - final DiscoveryNode sourceNode = new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + final DiscoveryNode sourceNode = new DiscoveryNode("id", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), + Version.CURRENT); return collection.startRecovery(indexShard, sourceNode, listener, timeValue); } } diff --git a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java index 6db3573d3a4..a765d599bd0 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java @@ -22,6 +22,7 @@ package org.elasticsearch.recovery; import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.procedures.IntProcedure; import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.English; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -93,7 +94,6 @@ import static org.hamcrest.Matchers.startsWith; public class RelocationIT extends ESIntegTestCase { private final TimeValue ACCEPTABLE_RELOCATION_TIME = new TimeValue(5, TimeUnit.MINUTES); - @Override protected Collection> nodePlugins() { return pluginList(MockTransportService.TestPlugin.class, MockIndexEventListener.TestPlugin.class); @@ -431,22 +431,23 @@ public class RelocationIT extends ESIntegTestCase { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/18553") public void testIndexAndRelocateConcurrently() throws ExecutionException, InterruptedException { + int halfNodes = randomIntBetween(1, 
3); Settings blueSetting = Settings.builder().put("node.attr.color", "blue").build(); - InternalTestCluster.Async> blueFuture = internalCluster().startNodesAsync(blueSetting, blueSetting); + InternalTestCluster.Async> blueFuture = internalCluster().startNodesAsync(halfNodes, blueSetting); Settings redSetting = Settings.builder().put("node.attr.color", "red").build(); - InternalTestCluster.Async> redFuture = internalCluster().startNodesAsync(redSetting, redSetting); + InternalTestCluster.Async> redFuture = internalCluster().startNodesAsync(halfNodes, redSetting); blueFuture.get(); redFuture.get(); logger.info("blue nodes: {}", blueFuture.get()); logger.info("red nodes: {}", redFuture.get()); - ensureStableCluster(4); + ensureStableCluster(halfNodes * 2); assertAcked(prepareCreate("test").setSettings(Settings.builder() .put("index.routing.allocation.exclude.color", "blue") - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(indexSettings()))); + .put(indexSettings()) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) // NORELEASE: set to randomInt(halfNodes - 1) once replica data loss is fixed + )); ensureYellow(); assertAllShardsOnNodes("test", redFuture.get().toArray(new String[2])); int numDocs = randomIntBetween(100, 150); @@ -477,10 +478,12 @@ public class RelocationIT extends ESIntegTestCase { indexRandom(true, docs); numDocs *= 2; - logger.info(" --> waiting for relocation to complete", numDocs); - ensureGreen("test");// move all shards to the new node (it waits on relocation) + logger.info(" --> waiting for relocation to complete"); + ensureGreen("test"); // move all shards to the new nodes (it waits on relocation) + final int numIters = randomIntBetween(10, 20); for (int i = 0; i < numIters; i++) { + logger.info(" --> checking iteration {}", i); SearchResponse afterRelocation = client().prepareSearch().setSize(ids.size()).get(); assertNoFailures(afterRelocation); assertSearchHits(afterRelocation, ids.toArray(new String[ids.size()])); @@ -503,7 +506,8 @@ public class RelocationIT extends ESIntegTestCase { if (chunkRequest.name().startsWith(IndexFileNames.SEGMENTS)) { // corrupting the segments_N files in order to make sure future recovery re-send files logger.debug("corrupting [{}] to {}. 
file name: [{}]", action, node, chunkRequest.name()); - byte[] array = chunkRequest.content().array(); + assert chunkRequest.content().toBytesRef().bytes == chunkRequest.content().toBytesRef().bytes : "no internal reference!!"; + byte[] array = chunkRequest.content().toBytesRef().bytes; array[0] = (byte) ~array[0]; // flip one byte in the content corruptionCount.countDown(); } diff --git a/core/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java b/core/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java index 3d46c0bbacf..17e2481b7d0 100644 --- a/core/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java +++ b/core/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRes import org.elasticsearch.client.Client; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -46,7 +45,6 @@ import java.util.List; import java.util.stream.Collectors; import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.blobId; -import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.parseNameUUIDFromBlobName; import static org.hamcrest.Matchers.equalTo; /** @@ -108,7 +106,90 @@ public class BlobStoreRepositoryTests extends ESSingleNodeTestCase { assertThat(snapshotIds, equalTo(originalSnapshots)); } - public void testSnapshotIndexFile() throws Exception { + public void testReadAndWriteSnapshotsThroughIndexFile() throws Exception { + final BlobStoreRepository repository = setupRepo(); + + // write to and read from a snapshot file with no entries + assertThat(repository.snapshots().size(), equalTo(0)); + repository.writeSnapshotsToIndexGen(Collections.emptyList()); + assertThat(repository.snapshots().size(), equalTo(0)); + + // write to and read from a snapshot file with a random number of entries + final int numSnapshots = randomIntBetween(1, 1000); + final List snapshotIds = new ArrayList<>(numSnapshots); + for (int i = 0; i < numSnapshots; i++) { + snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID())); + } + repository.writeSnapshotsToIndexGen(snapshotIds); + assertThat(repository.snapshots(), equalTo(snapshotIds)); + } + + public void testIndexGenerationalFiles() throws Exception { + final BlobStoreRepository repository = setupRepo(); + + // write to index generational file + final int numSnapshots = randomIntBetween(1, 1000); + final List snapshotIds = new ArrayList<>(numSnapshots); + for (int i = 0; i < numSnapshots; i++) { + snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID())); + } + repository.writeSnapshotsToIndexGen(snapshotIds); + assertThat(Sets.newHashSet(repository.readSnapshotsFromIndex()), equalTo(Sets.newHashSet(snapshotIds))); + assertThat(repository.latestIndexBlobId(), equalTo(0L)); + assertThat(repository.readSnapshotIndexLatestBlob(), equalTo(0L)); + + // adding more and writing to a new index generational file + for (int i = 0; i < 10; i++) { + snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID())); + } + repository.writeSnapshotsToIndexGen(snapshotIds); + 
assertThat(Sets.newHashSet(repository.readSnapshotsFromIndex()), equalTo(Sets.newHashSet(snapshotIds)));
+        assertThat(repository.latestIndexBlobId(), equalTo(1L));
+        assertThat(repository.readSnapshotIndexLatestBlob(), equalTo(1L));
+
+        // removing a snapshot and writing to a new index generational file
+        snapshotIds.remove(0);
+        repository.writeSnapshotsToIndexGen(snapshotIds);
+        assertThat(Sets.newHashSet(repository.readSnapshotsFromIndex()), equalTo(Sets.newHashSet(snapshotIds)));
+        assertThat(repository.latestIndexBlobId(), equalTo(2L));
+        assertThat(repository.readSnapshotIndexLatestBlob(), equalTo(2L));
+    }
+
+    public void testOldIndexFileFormat() throws Exception {
+        final BlobStoreRepository repository = setupRepo();
+
+        // write old index file format
+        final int numOldSnapshots = randomIntBetween(1, 50);
+        final List<SnapshotId> snapshotIds = new ArrayList<>();
+        for (int i = 0; i < numOldSnapshots; i++) {
+            snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), SnapshotId.UNASSIGNED_UUID));
+        }
+        writeOldFormat(repository, snapshotIds.stream().map(SnapshotId::getName).collect(Collectors.toList()));
+        assertThat(Sets.newHashSet(repository.snapshots()), equalTo(Sets.newHashSet(snapshotIds)));
+
+        // write to and read from a snapshot file with a random number of new entries added
+        final int numSnapshots = randomIntBetween(1, 1000);
+        for (int i = 0; i < numSnapshots; i++) {
+            snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID()));
+        }
+        repository.writeSnapshotsToIndexGen(snapshotIds);
+        assertThat(Sets.newHashSet(repository.snapshots()), equalTo(Sets.newHashSet(snapshotIds)));
+    }
+
+    public void testBlobId() {
+        SnapshotId snapshotId = new SnapshotId("abc123", SnapshotId.UNASSIGNED_UUID);
+        assertThat(blobId(snapshotId), equalTo("abc123")); // just the snapshot name
+        snapshotId = new SnapshotId("abc-123", SnapshotId.UNASSIGNED_UUID);
+        assertThat(blobId(snapshotId), equalTo("abc-123")); // just the snapshot name
+        String uuid = UUIDs.randomBase64UUID();
+        snapshotId = new SnapshotId("abc123", uuid);
+        assertThat(blobId(snapshotId), equalTo("abc123-" + uuid)); // snapshot name + '-' + uuid
+        uuid = UUIDs.randomBase64UUID();
+        snapshotId = new SnapshotId("abc-123", uuid);
+        assertThat(blobId(snapshotId), equalTo("abc-123-" + uuid)); // snapshot name + '-' + uuid
+    }
+
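Aside: the naming rule testBlobId pins down, restated as code. The method body below is inferred from those assertions, not copied from the repository source, and SnapshotId#getUUID is an assumed accessor:

    static String blobId(SnapshotId snapshotId) {
        if (SnapshotId.UNASSIGNED_UUID.equals(snapshotId.getUUID())) {
            return snapshotId.getName(); // legacy snapshots without a uuid: just the name
        }
        return snapshotId.getName() + "-" + snapshotId.getUUID(); // snapshot name + '-' + uuid
    }
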
+    private BlobStoreRepository setupRepo() {
         final Client client = client();
         final Path location = ESIntegTestCase.randomRepoPath(node().settings());
         final String repositoryName = "test-repo";
@@ -123,89 +204,7 @@ public class BlobStoreRepositoryTests extends ESSingleNodeTestCase {
         final RepositoriesService repositoriesService = getInstanceFromNode(RepositoriesService.class);
         @SuppressWarnings("unchecked") final BlobStoreRepository repository =
             (BlobStoreRepository) repositoriesService.repository(repositoryName);
-
-        // write to and read from a snapshot file with no entries
-        repository.writeSnapshotList(Collections.emptyList());
-        List<SnapshotId> readSnapshotIds = repository.readSnapshotList();
-        assertThat(readSnapshotIds.size(), equalTo(0));
-
-        // write to and read from a snapshot file with a random number of entries
-        final int numSnapshots = randomIntBetween(1, 1000);
-        final List<SnapshotId> snapshotIds = new ArrayList<>(numSnapshots);
-        for (int i = 0; i < numSnapshots; i++) {
-            snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID()));
-        }
-        repository.writeSnapshotList(snapshotIds);
-        readSnapshotIds = repository.readSnapshotList();
-        assertThat(readSnapshotIds, equalTo(snapshotIds));
-    }
-
-    public void testOldIndexFileFormat() throws Exception {
-        final Client client = client();
-        final Path location = ESIntegTestCase.randomRepoPath(node().settings());
-        final String repositoryName = "test-repo";
-
-        PutRepositoryResponse putRepositoryResponse =
-            client.admin().cluster().preparePutRepository(repositoryName)
-                .setType("fs")
-                .setSettings(Settings.builder().put(node().settings()).put("location", location))
-                .get();
-        assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
-
-        final RepositoriesService repositoriesService = getInstanceFromNode(RepositoriesService.class);
-        @SuppressWarnings("unchecked") final BlobStoreRepository repository =
-            (BlobStoreRepository) repositoriesService.repository(repositoryName);
-
-        // write old index file format
-        final int numOldSnapshots = randomIntBetween(1, 50);
-        final List<SnapshotId> snapshotIds = new ArrayList<>();
-        for (int i = 0; i < numOldSnapshots; i++) {
-            snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), SnapshotId.UNASSIGNED_UUID));
-        }
-        writeOldFormat(repository, snapshotIds.stream().map(SnapshotId::getName).collect(Collectors.toList()));
-        List<SnapshotId> readSnapshotIds = repository.readSnapshotList();
-        assertThat(Sets.newHashSet(readSnapshotIds), equalTo(Sets.newHashSet(snapshotIds)));
-
-        // write to and read from a snapshot file with a random number of new entries added
-        final int numSnapshots = randomIntBetween(1, 1000);
-        for (int i = 0; i < numSnapshots; i++) {
-            snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID()));
-        }
-        repository.writeSnapshotList(snapshotIds);
-        readSnapshotIds = repository.readSnapshotList();
-        assertThat(Sets.newHashSet(readSnapshotIds), equalTo(Sets.newHashSet(snapshotIds)));
-    }
-
-    public void testParseUUIDFromBlobName() {
-        String blobStr = "abc123";
-        Tuple<String, String> pair = parseNameUUIDFromBlobName(blobStr);
-        assertThat(pair.v1(), equalTo(blobStr)); // snapshot name
-        assertThat(pair.v2(), equalTo(SnapshotId.UNASSIGNED_UUID)); // snapshot uuid
-        blobStr = "abcefghijklmnopqrstuvwxyz";
-        pair = parseNameUUIDFromBlobName(blobStr);
-        assertThat(pair.v1(), equalTo(blobStr));
-        assertThat(pair.v2(), equalTo(SnapshotId.UNASSIGNED_UUID));
-        blobStr = "abc123-xyz"; // not enough characters after '-' to have a uuid
-        pair = parseNameUUIDFromBlobName(blobStr);
-        assertThat(pair.v1(), equalTo(blobStr));
-        assertThat(pair.v2(), equalTo(SnapshotId.UNASSIGNED_UUID));
-        blobStr = "abc123-a1b2c3d4e5f6g7h8i9j0k1";
-        pair = parseNameUUIDFromBlobName(blobStr);
-        assertThat(pair.v1(), equalTo("abc123"));
-        assertThat(pair.v2(), equalTo("a1b2c3d4e5f6g7h8i9j0k1"));
-    }
-
-    public void testBlobId() {
-        SnapshotId snapshotId = new SnapshotId("abc123", SnapshotId.UNASSIGNED_UUID);
-        assertThat(blobId(snapshotId), equalTo("abc123")); // just the snapshot name
-        snapshotId = new SnapshotId("abc-123", SnapshotId.UNASSIGNED_UUID);
-        assertThat(blobId(snapshotId), equalTo("abc-123")); // just the snapshot name
-        String uuid = UUIDs.randomBase64UUID();
-        snapshotId = new SnapshotId("abc123", uuid);
-        assertThat(blobId(snapshotId), equalTo("abc123-" + uuid)); // snapshot name + '-' + uuid
-        uuid = UUIDs.randomBase64UUID();
-        snapshotId = new SnapshotId("abc-123", uuid);
-        assertThat(blobId(snapshotId), equalTo("abc-123-" + uuid)); // snapshot name + '-' + uuid
+        return repository;
     }
 
     private void writeOldFormat(final BlobStoreRepository repository, final List<String> snapshotNames) throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java
b/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java index aa3b11e6250..051159b448b 100644 --- a/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java +++ b/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java @@ -36,17 +36,21 @@ import java.io.IOException; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -/** - * - */ public class BytesRestResponseTests extends ESTestCase { + class UnknownException extends Exception { + + public UnknownException(final String message, final Throwable cause) { + super(message, cause); + } + + } + public void testWithHeaders() throws Exception { RestRequest request = new FakeRestRequest(); RestChannel channel = randomBoolean() ? new DetailedExceptionRestChannel(request) : new SimpleExceptionRestChannel(request); @@ -62,9 +66,9 @@ public class BytesRestResponseTests extends ESTestCase { RestRequest request = new FakeRestRequest(); RestChannel channel = new SimpleExceptionRestChannel(request); - Throwable t = new ElasticsearchException("an error occurred reading data", new FileNotFoundException("/foo/bar")); + Exception t = new ElasticsearchException("an error occurred reading data", new FileNotFoundException("/foo/bar")); BytesRestResponse response = new BytesRestResponse(channel, t); - String text = response.content().toUtf8(); + String text = response.content().utf8ToString(); assertThat(text, containsString("ElasticsearchException[an error occurred reading data]")); assertThat(text, not(containsString("FileNotFoundException"))); assertThat(text, not(containsString("/foo/bar"))); @@ -75,9 +79,9 @@ public class BytesRestResponseTests extends ESTestCase { RestRequest request = new FakeRestRequest(); RestChannel channel = new DetailedExceptionRestChannel(request); - Throwable t = new ElasticsearchException("an error occurred reading data", new FileNotFoundException("/foo/bar")); + Exception t = new ElasticsearchException("an error occurred reading data", new FileNotFoundException("/foo/bar")); BytesRestResponse response = new BytesRestResponse(channel, t); - String text = response.content().toUtf8(); + String text = response.content().utf8ToString(); assertThat(text, containsString("{\"type\":\"exception\",\"reason\":\"an error occurred reading data\"}")); assertThat(text, containsString("{\"type\":\"file_not_found_exception\",\"reason\":\"/foo/bar\"}")); } @@ -86,10 +90,10 @@ public class BytesRestResponseTests extends ESTestCase { RestRequest request = new FakeRestRequest(); RestChannel channel = new SimpleExceptionRestChannel(request); - Throwable t = new Throwable("an error occurred reading data", new FileNotFoundException("/foo/bar")); + Exception t = new UnknownException("an error occurred reading data", new FileNotFoundException("/foo/bar")); BytesRestResponse response = new BytesRestResponse(channel, t); - String text = response.content().toUtf8(); - assertThat(text, not(containsString("Throwable[an error occurred reading data]"))); + String text = response.content().utf8ToString(); + assertThat(text, not(containsString("UnknownException[an error occurred reading data]"))); assertThat(text, not(containsString("FileNotFoundException[/foo/bar]"))); assertThat(text, not(containsString("error_trace"))); assertThat(text, 
containsString("\"error\":\"No ElasticsearchException found\"")); @@ -100,10 +104,10 @@ public class BytesRestResponseTests extends ESTestCase { request.params().put("error_trace", "true"); RestChannel channel = new DetailedExceptionRestChannel(request); - Throwable t = new Throwable("an error occurred reading data", new FileNotFoundException("/foo/bar")); + Exception t = new UnknownException("an error occurred reading data", new FileNotFoundException("/foo/bar")); BytesRestResponse response = new BytesRestResponse(channel, t); - String text = response.content().toUtf8(); - assertThat(text, containsString("\"type\":\"throwable\",\"reason\":\"an error occurred reading data\"")); + String text = response.content().utf8ToString(); + assertThat(text, containsString("\"type\":\"unknown_exception\",\"reason\":\"an error occurred reading data\"")); assertThat(text, containsString("{\"type\":\"file_not_found_exception\"")); assertThat(text, containsString("\"stack_trace\":\"[an error occurred reading data]")); } @@ -112,15 +116,15 @@ public class BytesRestResponseTests extends ESTestCase { RestRequest request = new FakeRestRequest(); RestChannel channel = new DetailedExceptionRestChannel(request); { - Throwable t = new ElasticsearchException("an error occurred reading data", new FileNotFoundException("/foo/bar")); - BytesRestResponse response = new BytesRestResponse(channel, t); - String text = response.content().toUtf8(); + Exception e = new ElasticsearchException("an error occurred reading data", new FileNotFoundException("/foo/bar")); + BytesRestResponse response = new BytesRestResponse(channel, e); + String text = response.content().utf8ToString(); assertThat(text, containsString("{\"root_cause\":[{\"type\":\"exception\",\"reason\":\"an error occurred reading data\"}]")); } { - Throwable t = new FileNotFoundException("/foo/bar"); - BytesRestResponse response = new BytesRestResponse(channel, t); - String text = response.content().toUtf8(); + Exception e = new FileNotFoundException("/foo/bar"); + BytesRestResponse response = new BytesRestResponse(channel, e); + String text = response.content().utf8ToString(); assertThat(text, containsString("{\"root_cause\":[{\"type\":\"file_not_found_exception\",\"reason\":\"/foo/bar\"}]")); } } @@ -130,7 +134,7 @@ public class BytesRestResponseTests extends ESTestCase { RestChannel channel = new SimpleExceptionRestChannel(request); BytesRestResponse response = new BytesRestResponse(channel, null); - String text = response.content().toUtf8(); + String text = response.content().utf8ToString(); assertThat(text, containsString("\"error\":\"unknown\"")); assertThat(text, not(containsString("error_trace"))); } @@ -144,7 +148,7 @@ public class BytesRestResponseTests extends ESTestCase { new SearchShardTarget("node_1", new Index("foo", "_na_"), 2)); SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[] {failure, failure1}); BytesRestResponse response = new BytesRestResponse(channel, new RemoteTransportException("foo", ex)); - String text = response.content().toUtf8(); + String text = response.content().utf8ToString(); String expected = "{\"error\":{\"root_cause\":[{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2}],\"type\":\"search_phase_execution_exception\",\"reason\":\"all shards 
failed\",\"phase\":\"search\",\"grouped\":true,\"failed_shards\":[{\"shard\":1,\"index\":\"foo\",\"node\":\"node_1\",\"reason\":{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2}}]},\"status\":400}"; assertEquals(expected.trim(), text.trim()); String stackTrace = ExceptionsHelper.stackTrace(ex); @@ -160,7 +164,7 @@ public class BytesRestResponseTests extends ESTestCase { // if we try to decode the path, this will throw an IllegalArgumentException again final BytesRestResponse response = new BytesRestResponse(channel, e); assertNotNull(response.content()); - final String content = response.content().toUtf8(); + final String content = response.content().utf8ToString(); assertThat(content, containsString("\"type\":\"illegal_argument_exception\"")); assertThat(content, containsString("\"reason\":\"partial escape sequence at end of string: %a\"")); assertThat(content, containsString("\"status\":" + 400)); @@ -171,7 +175,7 @@ public class BytesRestResponseTests extends ESTestCase { final RestChannel channel = new DetailedExceptionRestChannel(request); final BytesRestResponse response = new BytesRestResponse(channel, new ElasticsearchException("simulated")); assertNotNull(response.content()); - final String content = response.content().toUtf8(); + final String content = response.content().utf8ToString(); assertThat(content, containsString("\"type\":\"exception\"")); assertThat(content, containsString("\"reason\":\"simulated\"")); assertThat(content, containsString("\"status\":" + 500)); diff --git a/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java index 9cade7aa513..834afe5d5cd 100644 --- a/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ b/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.rest; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; @@ -78,7 +79,7 @@ public class RestControllerTests extends ESTestCase { } @Override - void executeHandler(RestRequest request, RestChannel channel) throws Exception { + void executeHandler(RestRequest request, RestChannel channel, NodeClient client) throws Exception { assertEquals("true", threadContext.getHeader("header.1")); assertEquals("true", threadContext.getHeader("header.2")); assertNull(threadContext.getHeader("header.3")); @@ -91,7 +92,7 @@ public class RestControllerTests extends ESTestCase { restHeaders.put("header.1", "true"); restHeaders.put("header.2", "true"); restHeaders.put("header.3", "false"); - restController.dispatchRequest(new FakeRestRequest.Builder().withHeaders(restHeaders).build(), null, threadContext); + restController.dispatchRequest(new FakeRestRequest.Builder().withHeaders(restHeaders).build(), null, null, threadContext); assertNull(threadContext.getHeader("header.1")); assertNull(threadContext.getHeader("header.2")); assertEquals("true", threadContext.getHeader("header.3")); @@ -117,7 +118,7 @@ public class RestControllerTests extends ESTestCase { } @Override - public void handleRequest(RestRequest request, RestChannel channel) throws Exception { + public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { //no op } diff --git a/core/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java 
b/core/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java index 51f36d1e25f..19b9051dd7e 100644 --- a/core/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java +++ b/core/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.rest; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -71,16 +72,13 @@ public class RestFilterChainTests extends ESTestCase { } } - restController.registerHandler(RestRequest.Method.GET, "/", new RestHandler() { - @Override - public void handleRequest(RestRequest request, RestChannel channel) throws Exception { - channel.sendResponse(new TestResponse()); - } + restController.registerHandler(RestRequest.Method.GET, "/", (request, channel, client) -> { + channel.sendResponse(new TestResponse()); }); FakeRestRequest fakeRestRequest = new FakeRestRequest(); FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, randomBoolean(), 1); - restController.dispatchRequest(fakeRestRequest, fakeRestChannel, new ThreadContext(Settings.EMPTY)); + restController.dispatchRequest(fakeRestRequest, fakeRestChannel, null, new ThreadContext(Settings.EMPTY)); assertThat(fakeRestChannel.await(), equalTo(true)); @@ -117,12 +115,9 @@ public class RestFilterChainTests extends ESTestCase { final int additionalContinueCount = randomInt(10); - TestFilter testFilter = new TestFilter(randomInt(), new Callback() { - @Override - public void execute(final RestRequest request, final RestChannel channel, final RestFilterChain filterChain) throws Exception { - for (int i = 0; i <= additionalContinueCount; i++) { - filterChain.continueProcessing(request, channel); - } + TestFilter testFilter = new TestFilter(randomInt(), (request, channel, client, filterChain) -> { + for (int i = 0; i <= additionalContinueCount; i++) { + filterChain.continueProcessing(request, channel, null); } }); @@ -131,14 +126,14 @@ public class RestFilterChainTests extends ESTestCase { restController.registerHandler(RestRequest.Method.GET, "/", new RestHandler() { @Override - public void handleRequest(RestRequest request, RestChannel channel) throws Exception { + public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { channel.sendResponse(new TestResponse()); } }); FakeRestRequest fakeRestRequest = new FakeRestRequest(); FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, randomBoolean(), additionalContinueCount + 1); - restController.dispatchRequest(fakeRestRequest, fakeRestChannel, new ThreadContext(Settings.EMPTY)); + restController.dispatchRequest(fakeRestRequest, fakeRestChannel, null, new ThreadContext(Settings.EMPTY)); fakeRestChannel.await(); assertThat(testFilter.runs.get(), equalTo(1)); @@ -147,23 +142,23 @@ public class RestFilterChainTests extends ESTestCase { assertThat(fakeRestChannel.errors().get(), equalTo(additionalContinueCount)); } - private static enum Operation implements Callback { + private enum Operation implements Callback { CONTINUE_PROCESSING { @Override - public void execute(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws Exception { - filterChain.continueProcessing(request, channel); + public void execute(RestRequest request, RestChannel channel, NodeClient client, RestFilterChain filterChain) throws Exception { + 
filterChain.continueProcessing(request, channel, client); } }, CHANNEL_RESPONSE { @Override - public void execute(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws Exception { + public void execute(RestRequest request, RestChannel channel, NodeClient client, RestFilterChain filterChain) throws Exception { channel.sendResponse(new TestResponse()); } } } - private static interface Callback { - void execute(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws Exception; + private interface Callback { + void execute(RestRequest request, RestChannel channel, NodeClient client, RestFilterChain filterChain) throws Exception; } private final AtomicInteger counter = new AtomicInteger(); @@ -180,10 +175,10 @@ public class RestFilterChainTests extends ESTestCase { } @Override - public void process(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws Exception { + public void process(RestRequest request, RestChannel channel, NodeClient client, RestFilterChain filterChain) throws Exception { this.runs.incrementAndGet(); this.executionToken = counter.incrementAndGet(); - this.callback.execute(request, channel, filterChain); + this.callback.execute(request, channel, client, filterChain); } @Override diff --git a/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java new file mode 100644 index 00000000000..d7353dfbe57 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java @@ -0,0 +1,162 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.rest.action.cat; + +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.action.admin.indices.stats.CommonStats; +import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; +import org.elasticsearch.action.admin.indices.stats.IndicesStatsTests; +import org.elasticsearch.action.admin.indices.stats.ShardStats; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.UnassignedInfo; +import org.elasticsearch.common.Table; +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.cache.query.QueryCacheStats; +import org.elasticsearch.index.cache.request.RequestCacheStats; +import org.elasticsearch.index.engine.SegmentsStats; +import org.elasticsearch.index.fielddata.FieldDataStats; +import org.elasticsearch.index.flush.FlushStats; +import org.elasticsearch.index.get.GetStats; +import org.elasticsearch.index.merge.MergeStats; +import org.elasticsearch.index.refresh.RefreshStats; +import org.elasticsearch.index.search.stats.SearchStats; +import org.elasticsearch.index.shard.DocsStats; +import org.elasticsearch.index.shard.IndexingStats; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardPath; +import org.elasticsearch.index.store.StoreStats; +import org.elasticsearch.index.warmer.WarmerStats; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.search.suggest.completion.CompletionStats; +import org.elasticsearch.test.ESTestCase; + +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; + +import static java.util.Collections.emptyList; +import static org.hamcrest.Matchers.equalTo; + +/** + * Tests for {@link RestIndicesAction} + */ +public class RestIndicesActionTests extends ESTestCase { + + public void testBuildTable() { + final Settings settings = Settings.EMPTY; + final RestController restController = new RestController(settings); + final RestIndicesAction action = new RestIndicesAction(settings, restController, new IndexNameExpressionResolver(settings)); + + // build a (semi-)random table + final int numIndices = randomIntBetween(0, 5); + Index[] indices = new Index[numIndices]; + for (int i = 0; i < numIndices; i++) { + indices[i] = new Index(randomAsciiOfLength(5), UUIDs.randomBase64UUID()); + } + + final MetaData.Builder metaDataBuilder = MetaData.builder(); + for (final Index index : indices) { + metaDataBuilder.put(IndexMetaData.builder(index.getName()) + .settings(Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID())) + .creationDate(System.currentTimeMillis()) + .numberOfShards(1) + .numberOfReplicas(1) + .state(IndexMetaData.State.OPEN)); + } + final MetaData metaData = metaDataBuilder.build(); + + final ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) + .metaData(metaData) + .build(); + final String[] indicesStr = new String[indices.length]; + for (int i = 0; i 
< indices.length; i++) { + indicesStr[i] = indices[i].getName(); + } + final ClusterHealthResponse clusterHealth = new ClusterHealthResponse( + clusterState.getClusterName().value(), indicesStr, clusterState, 0, 0, 0, TimeValue.timeValueMillis(1000L) + ); + + final Table table = action.buildTable(null, indices, clusterHealth, randomIndicesStatsResponse(indices), metaData); + + // now, verify the table is correct + int count = 0; + List headers = table.getHeaders(); + assertThat(headers.get(count++).value, equalTo("health")); + assertThat(headers.get(count++).value, equalTo("status")); + assertThat(headers.get(count++).value, equalTo("index")); + assertThat(headers.get(count++).value, equalTo("uuid")); + + List> rows = table.getRows(); + assertThat(rows.size(), equalTo(indices.length)); + // TODO: more to verify (e.g. randomize cluster health, num primaries, num replicas, etc) + for (int i = 0; i < rows.size(); i++) { + count = 0; + final List row = rows.get(i); + assertThat(row.get(count++).value, equalTo("red*")); // all are red because cluster state doesn't have routing entries + assertThat(row.get(count++).value, equalTo("open")); // all are OPEN for now + assertThat(row.get(count++).value, equalTo(indices[i].getName())); + assertThat(row.get(count++).value, equalTo(indices[i].getUUID())); + } + } + + private IndicesStatsResponse randomIndicesStatsResponse(final Index[] indices) { + List shardStats = new ArrayList<>(); + for (final Index index : indices) { + for (int i = 0; i < 2; i++) { + ShardId shardId = new ShardId(index, i); + Path path = createTempDir().resolve("indices").resolve(index.getUUID()).resolve(String.valueOf(i)); + ShardRouting shardRouting = ShardRouting.newUnassigned(shardId, null, i == 0, + new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); + shardRouting = shardRouting.initialize("node-0", null, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE); + shardRouting = shardRouting.moveToStarted(); + CommonStats stats = new CommonStats(); + stats.fieldData = new FieldDataStats(); + stats.queryCache = new QueryCacheStats(); + stats.docs = new DocsStats(); + stats.store = new StoreStats(); + stats.indexing = new IndexingStats(); + stats.search = new SearchStats(); + stats.segments = new SegmentsStats(); + stats.merge = new MergeStats(); + stats.refresh = new RefreshStats(); + stats.completion = new CompletionStats(); + stats.requestCache = new RequestCacheStats(); + stats.get = new GetStats(); + stats.flush = new FlushStats(); + stats.warmer = new WarmerStats(); + shardStats.add(new ShardStats(shardRouting, new ShardPath(false, path, path, shardId), stats, null, null)); + } + } + return IndicesStatsTests.newIndicesStatsResponse( + shardStats.toArray(new ShardStats[shardStats.size()]), shardStats.size(), shardStats.size(), 0, emptyList() + ); + } +} diff --git a/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java index b603ded8697..34275d78d7b 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java @@ -51,7 +51,7 @@ public class RestRecoveryActionTests extends ESTestCase { public void testRestRecoveryAction() { final Settings settings = Settings.EMPTY; final RestController restController = new RestController(settings); - final RestRecoveryAction action = new RestRecoveryAction(settings, restController, restController, null); + 
final RestRecoveryAction action = new RestRecoveryAction(settings, restController, restController); final int totalShards = randomIntBetween(1, 32); final int successfulShards = Math.max(0, totalShards - randomIntBetween(1, 2)); final int failedShards = totalShards - successfulShards; diff --git a/core/src/test/java/org/elasticsearch/rest/action/main/RestMainActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/main/RestMainActionTests.java index ebb7dd255aa..ffefa074df7 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/main/RestMainActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/main/RestMainActionTests.java @@ -95,6 +95,6 @@ public class RestMainActionTests extends ESTestCase { } mainResponse.toXContent(responseBuilder, ToXContent.EMPTY_PARAMS); BytesReference xcontentBytes = responseBuilder.bytes(); - assertTrue(BytesReference.Helper.bytesEqual(xcontentBytes, response.content())); + assertEquals(xcontentBytes, response.content()); } } diff --git a/core/src/test/java/org/elasticsearch/rest/action/support/RestTableTests.java b/core/src/test/java/org/elasticsearch/rest/action/support/RestTableTests.java index a7e17785d48..3dfae8cc4f8 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/support/RestTableTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/support/RestTableTests.java @@ -169,7 +169,7 @@ public class RestTableTests extends ESTestCase { private void assertResponse(Map<String, String> headers, String mediaType, String body) throws Exception { RestResponse response = assertResponseContentType(headers, mediaType); - assertThat(response.content().toUtf8(), equalTo(body)); + assertThat(response.content().utf8ToString(), equalTo(body)); } private List<String> getHeaderNames(List<Table.Cell> headers) { diff --git a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java index 1872ce8f050..9fbd4a81f19 100644 --- a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java @@ -55,7 +55,7 @@ public class FileScriptTests extends ESTestCase { .put("script.engine." 
+ MockScriptEngine.NAME + ".file.aggs", "false").build(); ScriptService scriptService = makeScriptService(settings); Script script = new Script("script1", ScriptService.ScriptType.FILE, MockScriptEngine.NAME, null); - CompiledScript compiledScript = scriptService.compile(script, ScriptContext.Standard.SEARCH, Collections.emptyMap(), null); + CompiledScript compiledScript = scriptService.compile(script, ScriptContext.Standard.SEARCH, Collections.emptyMap()); assertNotNull(compiledScript); MockCompiledScript executable = (MockCompiledScript) compiledScript.compiled(); assertEquals("script1.mockscript", executable.name); @@ -72,7 +72,7 @@ public class FileScriptTests extends ESTestCase { Script script = new Script("script1", ScriptService.ScriptType.FILE, MockScriptEngine.NAME, null); for (ScriptContext context : ScriptContext.Standard.values()) { try { - scriptService.compile(script, context, Collections.emptyMap(), null); + scriptService.compile(script, context, Collections.emptyMap()); fail(context.getKey() + " script should have been rejected"); } catch(Exception e) { assertTrue(e.getMessage(), e.getMessage().contains("scripts of type [file], operation [" + context.getKey() + "] and lang [" + MockScriptEngine.NAME + "] are disabled")); diff --git a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java index 70cc3588602..1ee4c58455b 100644 --- a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java @@ -19,8 +19,13 @@ package org.elasticsearch.script; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -30,13 +35,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.watcher.ResourceWatcherService; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static java.util.Collections.singletonMap; @@ -55,10 +53,9 @@ public class NativeScriptTests extends ESTestCase { List> scriptSettings = scriptModule.getSettings(); scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED); - ClusterState state = ClusterState.builder(new ClusterName("_name")).build(); ExecutableScript executable = scriptModule.getScriptService().executable( new Script("my", ScriptType.INLINE, NativeScriptEngineService.NAME, null), ScriptContext.Standard.SEARCH, - Collections.emptyMap(), state); + Collections.emptyMap()); assertThat(executable.run().toString(), equalTo("test")); } @@ -85,7 +82,7 @@ public class NativeScriptTests extends ESTestCase { for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) { assertThat(scriptService.compile(new Script("my", ScriptType.INLINE, NativeScriptEngineService.NAME, null), scriptContext, - Collections.emptyMap(), null), notNullValue()); + Collections.emptyMap()), notNullValue()); } } diff --git a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java 
b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java index 57bdfff3f7f..c7ee421e7e0 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java @@ -59,7 +59,7 @@ public class ScriptContextTests extends ESTestCase { for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) { try { Script script = new Script("1", scriptType, MockScriptEngine.NAME, null); - scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_globally_disabled_op"), Collections.emptyMap(), null); + scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_globally_disabled_op"), Collections.emptyMap()); fail("script compilation should have been rejected"); } catch (IllegalStateException e) { assertThat(e.getMessage(), containsString("scripts of type [" + scriptType + "], operation [" + PLUGIN_NAME + "_custom_globally_disabled_op] and lang [" + MockScriptEngine.NAME + "] are disabled")); @@ -71,16 +71,16 @@ public class ScriptContextTests extends ESTestCase { ScriptService scriptService = makeScriptService(); Script script = new Script("1", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, null); try { - scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"), Collections.emptyMap(), null); + scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"), Collections.emptyMap()); fail("script compilation should have been rejected"); } catch (IllegalStateException e) { assertTrue(e.getMessage(), e.getMessage().contains("scripts of type [inline], operation [" + PLUGIN_NAME + "_custom_exp_disabled_op] and lang [" + MockScriptEngine.NAME + "] are disabled")); } // still works for other script contexts - assertNotNull(scriptService.compile(script, ScriptContext.Standard.AGGS, Collections.emptyMap(), null)); - assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, Collections.emptyMap(), null)); - assertNotNull(scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_op"), Collections.emptyMap(), null)); + assertNotNull(scriptService.compile(script, ScriptContext.Standard.AGGS, Collections.emptyMap())); + assertNotNull(scriptService.compile(script, ScriptContext.Standard.SEARCH, Collections.emptyMap())); + assertNotNull(scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "custom_op"), Collections.emptyMap())); } public void testUnknownPluginScriptContext() throws Exception { @@ -88,7 +88,7 @@ public class ScriptContextTests extends ESTestCase { for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) { try { Script script = new Script("1", scriptType, MockScriptEngine.NAME, null); - scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "unknown"), Collections.emptyMap(), null); + scriptService.compile(script, new ScriptContext.Plugin(PLUGIN_NAME, "unknown"), Collections.emptyMap()); fail("script compilation should have been rejected"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage(), e.getMessage().contains("script context [" + PLUGIN_NAME + "_unknown] not supported")); @@ -107,7 +107,7 @@ public class ScriptContextTests extends ESTestCase { for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) { try { Script script = new Script("1", scriptType, MockScriptEngine.NAME, null); - scriptService.compile(script, context, Collections.emptyMap(), null); + 
scriptService.compile(script, context, Collections.emptyMap()); fail("script compilation should have been rejected"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage(), e.getMessage().contains("script context [test] not supported")); diff --git a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index 597cb7e6032..43224488480 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -114,9 +114,10 @@ public class ScriptServiceTests extends ESTestCase { private void buildScriptService(Settings additionalSettings) throws IOException { Settings finalSettings = Settings.builder().put(baseSettings).put(additionalSettings).build(); Environment environment = new Environment(finalSettings); + // TODO: scriptService = new ScriptService(finalSettings, environment, resourceWatcherService, scriptEngineRegistry, scriptContextRegistry, scriptSettings) { @Override - String getScriptFromClusterState(ClusterState state, String scriptLang, String id) { + String getScriptFromClusterState(String scriptLang, String id) { //mock the script that gets retrieved from an index return "100"; } @@ -141,7 +142,7 @@ public class ScriptServiceTests extends ESTestCase { resourceWatcherService.notifyNow(); CompiledScript compiledScript = scriptService.compile(new Script("test_script", ScriptType.FILE, "test", null), - ScriptContext.Standard.SEARCH, Collections.emptyMap(), emptyClusterState()); + ScriptContext.Standard.SEARCH, Collections.emptyMap()); assertThat(compiledScript.compiled(), equalTo((Object) "compiled_test_file")); Files.delete(testFileNoExt); @@ -150,7 +151,7 @@ public class ScriptServiceTests extends ESTestCase { try { scriptService.compile(new Script("test_script", ScriptType.FILE, "test", null), ScriptContext.Standard.SEARCH, - Collections.emptyMap(), emptyClusterState()); + Collections.emptyMap()); fail("the script test_script should no longer exist"); } catch (IllegalArgumentException ex) { assertThat(ex.getMessage(), containsString("Unable to find on disk file script [test_script] using lang [test]")); @@ -168,7 +169,7 @@ public class ScriptServiceTests extends ESTestCase { resourceWatcherService.notifyNow(); CompiledScript compiledScript = scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null), - ScriptContext.Standard.SEARCH, Collections.emptyMap(), emptyClusterState()); + ScriptContext.Standard.SEARCH, Collections.emptyMap()); assertThat(compiledScript.compiled(), equalTo((Object) "compiled_test_file_script")); Files.delete(testHiddenFile); @@ -179,9 +180,9 @@ public class ScriptServiceTests extends ESTestCase { public void testInlineScriptCompiledOnceCache() throws IOException { buildScriptService(Settings.EMPTY); CompiledScript compiledScript1 = scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), - randomFrom(scriptContexts), Collections.emptyMap(), emptyClusterState()); + randomFrom(scriptContexts), Collections.emptyMap()); CompiledScript compiledScript2 = scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), - randomFrom(scriptContexts), Collections.emptyMap(), emptyClusterState()); + randomFrom(scriptContexts), Collections.emptyMap()); assertThat(compiledScript1.compiled(), sameInstance(compiledScript2.compiled())); } @@ -304,7 +305,7 @@ public class ScriptServiceTests extends ESTestCase { String type = 
scriptEngineService.getType(); try { scriptService.compile(new Script("test", randomFrom(ScriptType.values()), type, null), new ScriptContext.Plugin( - pluginName, unknownContext), Collections.emptyMap(), emptyClusterState()); + pluginName, unknownContext), Collections.emptyMap()); fail("script compilation should have been rejected"); } catch(IllegalArgumentException e) { assertThat(e.getMessage(), containsString("script context [" + pluginName + "_" + unknownContext + "] not supported")); @@ -314,22 +315,20 @@ public class ScriptServiceTests extends ESTestCase { public void testCompileCountedInCompilationStats() throws IOException { buildScriptService(Settings.EMPTY); scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), - Collections.emptyMap(), emptyClusterState()); + Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } public void testExecutableCountedInCompilationStats() throws IOException { buildScriptService(Settings.EMPTY); - ClusterState state = ClusterState.builder(new ClusterName("_name")).build(); - scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap(), state); + scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } public void testSearchCountedInCompilationStats() throws IOException { buildScriptService(Settings.EMPTY); - ClusterState state = ClusterState.builder(new ClusterName("_name")).build(); scriptService.search(null, new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), - Collections.emptyMap(), state); + Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } @@ -339,7 +338,7 @@ public class ScriptServiceTests extends ESTestCase { for (int i = 0; i < numberOfCompilations; i++) { scriptService .compile(new Script(i + " + " + i, ScriptType.INLINE, "test", null), randomFrom(scriptContexts), - Collections.emptyMap(), emptyClusterState()); + Collections.emptyMap()); } assertEquals(numberOfCompilations, scriptService.stats().getCompilations()); } @@ -349,9 +348,8 @@ public class ScriptServiceTests extends ESTestCase { builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getKey(), 1); builder.put("script.inline", "true"); buildScriptService(builder.build()); - ClusterState state = ClusterState.builder(new ClusterName("_name")).build(); - scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap(), state); - scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap(), state); + scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap()); + scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } @@ -359,14 +357,14 @@ public class ScriptServiceTests extends ESTestCase { buildScriptService(Settings.EMPTY); createFileScripts("test"); scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null), randomFrom(scriptContexts), - Collections.emptyMap(), emptyClusterState()); + Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } public void 
testIndexedScriptCountedInCompilationStats() throws IOException { buildScriptService(Settings.EMPTY); scriptService.compile(new Script("script", ScriptType.STORED, "test", null), randomFrom(scriptContexts), - Collections.emptyMap(), emptyClusterState()); + Collections.emptyMap()); assertEquals(1L, scriptService.stats().getCompilations()); } @@ -375,9 +373,8 @@ public class ScriptServiceTests extends ESTestCase { builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getKey(), 1); builder.put("script.inline", "true"); buildScriptService(builder.build()); - ClusterState state = ClusterState.builder(new ClusterName("_name")).build(); - scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap(), state); - scriptService.executable(new Script("2+2", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap(), state); + scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap()); + scriptService.executable(new Script("2+2", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), Collections.emptyMap()); assertEquals(2L, scriptService.stats().getCompilations()); assertEquals(1L, scriptService.stats().getCacheEvictions()); } @@ -388,7 +385,7 @@ public class ScriptServiceTests extends ESTestCase { builder.put("script.inline", "true"); buildScriptService(builder.build()); CompiledScript script = scriptService.compile(new Script("1 + 1", ScriptType.INLINE, null, null), - randomFrom(scriptContexts), Collections.emptyMap(), emptyClusterState()); + randomFrom(scriptContexts), Collections.emptyMap()); assertEquals(script.lang(), "test"); } @@ -469,7 +466,7 @@ public class ScriptServiceTests extends ESTestCase { private void assertCompileRejected(String lang, String script, ScriptType scriptType, ScriptContext scriptContext) { try { - scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, Collections.emptyMap(), emptyClusterState()); + scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, Collections.emptyMap()); fail("compile should have been rejected for lang [" + lang + "], script_type [" + scriptType + "], scripted_op [" + scriptContext + "]"); } catch(IllegalStateException e) { //all good @@ -477,9 +474,8 @@ public class ScriptServiceTests extends ESTestCase { } private void assertCompileAccepted(String lang, String script, ScriptType scriptType, ScriptContext scriptContext) { - ClusterState state = emptyClusterState(); assertThat( - scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, Collections.emptyMap(), state), + scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, Collections.emptyMap()), notNullValue() ); } @@ -528,8 +524,6 @@ public class ScriptServiceTests extends ESTestCase { public static final String NAME = "dtest"; - public static final List EXTENSIONS = Collections.unmodifiableList(Arrays.asList("dtest")); - @Override public String getType() { return NAME; @@ -559,9 +553,4 @@ public class ScriptServiceTests extends ESTestCase { public void close() { } } - - private static ClusterState emptyClusterState() { - return ClusterState.builder(new ClusterName("_name")).build(); - } - } diff --git a/core/src/test/java/org/elasticsearch/script/StoredScriptsIT.java b/core/src/test/java/org/elasticsearch/script/StoredScriptsIT.java index d8d6b0f5409..658a3bf5658 100644 --- 
a/core/src/test/java/org/elasticsearch/script/StoredScriptsIT.java +++ b/core/src/test/java/org/elasticsearch/script/StoredScriptsIT.java @@ -29,8 +29,8 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke public class StoredScriptsIT extends ESIntegTestCase { - private final static int SCRIPT_MAX_SIZE_IN_BYTES = 64; - private final static String LANG = MockScriptEngine.NAME; + private static final int SCRIPT_MAX_SIZE_IN_BYTES = 64; + private static final String LANG = MockScriptEngine.NAME; @Override protected Settings nodeSettings(int nodeOrdinal) { diff --git a/core/src/test/java/org/elasticsearch/search/DocValueFormatTests.java b/core/src/test/java/org/elasticsearch/search/DocValueFormatTests.java index 2ca255ea1a3..192f40d4b2b 100644 --- a/core/src/test/java/org/elasticsearch/search/DocValueFormatTests.java +++ b/core/src/test/java/org/elasticsearch/search/DocValueFormatTests.java @@ -43,13 +43,13 @@ public class DocValueFormatTests extends ESTestCase { BytesStreamOutput out = new BytesStreamOutput(); out.writeNamedWriteable(DocValueFormat.BOOLEAN); - StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes()), registry); + StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry); assertSame(DocValueFormat.BOOLEAN, in.readNamedWriteable(DocValueFormat.class)); DocValueFormat.Decimal decimalFormat = new DocValueFormat.Decimal("###.##"); out = new BytesStreamOutput(); out.writeNamedWriteable(decimalFormat); - in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes()), registry); + in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry); DocValueFormat vf = in.readNamedWriteable(DocValueFormat.class); assertEquals(DocValueFormat.Decimal.class, vf.getClass()); assertEquals("###.##", ((DocValueFormat.Decimal) vf).pattern); @@ -57,7 +57,7 @@ public class DocValueFormatTests extends ESTestCase { DocValueFormat.DateTime dateFormat = new DocValueFormat.DateTime(Joda.forPattern("epoch_second"), DateTimeZone.forOffsetHours(1)); out = new BytesStreamOutput(); out.writeNamedWriteable(dateFormat); - in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes()), registry); + in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry); vf = in.readNamedWriteable(DocValueFormat.class); assertEquals(DocValueFormat.DateTime.class, vf.getClass()); assertEquals("epoch_second", ((DocValueFormat.DateTime) vf).formatter.format()); @@ -65,17 +65,17 @@ public class DocValueFormatTests extends ESTestCase { out = new BytesStreamOutput(); out.writeNamedWriteable(DocValueFormat.GEOHASH); - in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes()), registry); + in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry); assertSame(DocValueFormat.GEOHASH, in.readNamedWriteable(DocValueFormat.class)); out = new BytesStreamOutput(); out.writeNamedWriteable(DocValueFormat.IP); - in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes()), registry); + in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry); assertSame(DocValueFormat.IP, in.readNamedWriteable(DocValueFormat.class)); out = new BytesStreamOutput(); out.writeNamedWriteable(DocValueFormat.RAW); - in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes()), registry); + in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry); assertSame(DocValueFormat.RAW, in.readNamedWriteable(DocValueFormat.class)); } diff --git 
a/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java b/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java index a4837e382ac..5caba0fb441 100644 --- a/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java +++ b/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java @@ -753,35 +753,35 @@ public class MultiValueModeTests extends ESTestCase { public void testWriteTo() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { MultiValueMode.SUM.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(0)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { MultiValueMode.AVG.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(1)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { MultiValueMode.MEDIAN.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(2)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { MultiValueMode.MIN.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(3)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { MultiValueMode.MAX.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(4)); } } @@ -790,35 +790,35 @@ public class MultiValueModeTests extends ESTestCase { public void testReadFrom() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(0); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(MultiValueMode.readMultiValueModeFrom(in), equalTo(MultiValueMode.SUM)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(1); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(MultiValueMode.readMultiValueModeFrom(in), equalTo(MultiValueMode.AVG)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(2); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(MultiValueMode.readMultiValueModeFrom(in), equalTo(MultiValueMode.MEDIAN)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(3); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(MultiValueMode.readMultiValueModeFrom(in), equalTo(MultiValueMode.MIN)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(4); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(MultiValueMode.readMultiValueModeFrom(in), equalTo(MultiValueMode.MAX)); } } diff --git a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java index 72873bc0d48..20a557d56a6 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java +++ b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java @@ -47,7 +47,7 @@ import static org.hamcrest.Matchers.notNullValue; 
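The hunks above repeatedly migrate the in-memory serialization round trip from StreamInput.wrap(out.bytes()) to out.bytes().streamInput(). A minimal sketch of the new idiom, using only the BytesStreamOutput/StreamInput calls visible in these hunks (the class name and the value 4, mirroring the vInt that MultiValueMode.MAX writes, are illustrative):

    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.StreamInput;

    public class StreamRoundTripSketch {
        public static void main(String[] args) throws Exception {
            try (BytesStreamOutput out = new BytesStreamOutput()) {
                out.writeVInt(4); // serialize, as MultiValueMode.MAX.writeTo(out) does
                // replaces the removed StreamInput.wrap(out.bytes()) wrapper
                try (StreamInput in = out.bytes().streamInput()) {
                    assert in.readVInt() == 4; // read back what was written
                }
            }
        }
    }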
public class SearchModuleTests extends ModuleTestCase { public void testDoubleRegister() { - SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry()); + SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false); try { module.registerHighlighter("fvh", new PlainHighlighter()); } catch (IllegalArgumentException e) { @@ -62,7 +62,7 @@ public class SearchModuleTests extends ModuleTestCase { } public void testRegisterSuggester() { - SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry()); + SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false); module.registerSuggester("custom", CustomSuggester.INSTANCE); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> module.registerSuggester("custom", CustomSuggester.INSTANCE)); @@ -70,7 +70,7 @@ public class SearchModuleTests extends ModuleTestCase { } public void testRegisterHighlighter() { - SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry()); + SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false); CustomHighlighter customHighlighter = new CustomHighlighter(); module.registerHighlighter("custom", customHighlighter); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, @@ -88,14 +88,14 @@ public class SearchModuleTests extends ModuleTestCase { } public void testRegisterQueryParserDuplicate() { - SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry()); + SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> module .registerQuery(TermQueryBuilder::new, TermQueryBuilder::fromXContent, TermQueryBuilder.QUERY_NAME_FIELD)); assertThat(e.getMessage(), containsString("] already registered for [query][term] while trying to register [org.elasticsearch.")); } public void testRegisteredQueries() throws IOException { - SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry()); + SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false); List allSupportedQueries = new ArrayList<>(); Collections.addAll(allSupportedQueries, NON_DEPRECATED_QUERIES); Collections.addAll(allSupportedQueries, DEPRECATED_QUERIES); diff --git a/core/src/test/java/org/elasticsearch/search/SearchWithRejectionsIT.java b/core/src/test/java/org/elasticsearch/search/SearchWithRejectionsIT.java index 2bb39ad10ea..6542bad5b8a 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchWithRejectionsIT.java +++ b/core/src/test/java/org/elasticsearch/search/SearchWithRejectionsIT.java @@ -65,7 +65,7 @@ public class SearchWithRejectionsIT extends ESIntegTestCase { for (int i = 0; i < numSearches; i++) { try { responses[i].get(); - } catch (Throwable t) { + } catch (Exception t) { } } awaitBusy(() -> client().admin().indices().prepareStats().execute().actionGet().getTotal().getSearch().getOpenContexts() == 0, 1, TimeUnit.SECONDS); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java index 08109a437cc..58b4b97db5c 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java @@ -114,12 
+114,12 @@ public class AggregatorParsingTests extends ESTestCase { b.bind(ScriptService.class).toInstance(scriptModule.getScriptService()); }, settingsModule, - new IndicesModule(namedWriteableRegistry) { + new IndicesModule(namedWriteableRegistry, Collections.emptyList()) { @Override protected void configure() { bindMapperExtension(); } - }, new SearchModule(settings, namedWriteableRegistry) { + }, new SearchModule(settings, namedWriteableRegistry, false) { @Override protected void configureSearch() { // Skip me diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java index d953eb02174..ddde1fd9eb6 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java @@ -145,14 +145,13 @@ public abstract class BaseAggregationTestCase categoryToControl = new HashMap<>(); + private static final Map categoryToControl = new HashMap<>(); @Override public void setupSuiteScopeCluster() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index 4f7064a33bb..2c3534183e0 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -47,6 +47,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.concurrent.Callable; @@ -237,6 +238,46 @@ public class DateHistogramIT extends ESIntegTestCase { assertThat(bucket.getDocCount(), equalTo(1L)); } + public void testSingleValued_timeZone_epoch() throws Exception { + String format = randomBoolean() ? "epoch_millis" : "epoch_second"; + int millisDivider = format.equals("epoch_millis") ? 
1 : 1000; + if (randomBoolean()) { + format = format + "||date_optional_time"; + } + DateTimeZone tz = DateTimeZone.forID("+01:00"); + SearchResponse response = client().prepareSearch("idx") + .addAggregation(dateHistogram("histo").field("date") + .dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(1) + .timeZone(tz).format(format)) + .execute() + .actionGet(); + assertSearchResponse(response); + + Histogram histo = response.getAggregations().get("histo"); + assertThat(histo, notNullValue()); + assertThat(histo.getName(), equalTo("histo")); + List<? extends Histogram.Bucket> buckets = histo.getBuckets(); + assertThat(buckets.size(), equalTo(6)); + + List<DateTime> expectedKeys = new ArrayList<>(); + expectedKeys.add(new DateTime(2012, 1, 1, 23, 0, DateTimeZone.UTC)); + expectedKeys.add(new DateTime(2012, 2, 1, 23, 0, DateTimeZone.UTC)); + expectedKeys.add(new DateTime(2012, 2, 14, 23, 0, DateTimeZone.UTC)); + expectedKeys.add(new DateTime(2012, 3, 1, 23, 0, DateTimeZone.UTC)); + expectedKeys.add(new DateTime(2012, 3, 14, 23, 0, DateTimeZone.UTC)); + expectedKeys.add(new DateTime(2012, 3, 22, 23, 0, DateTimeZone.UTC)); + + + Iterator<DateTime> keyIterator = expectedKeys.iterator(); + for (Histogram.Bucket bucket : buckets) { + assertThat(bucket, notNullValue()); + DateTime expectedKey = keyIterator.next(); + assertThat(bucket.getKeyAsString(), equalTo(Long.toString(expectedKey.getMillis() / millisDivider))); + assertThat(((DateTime) bucket.getKey()), equalTo(expectedKey)); + assertThat(bucket.getDocCount(), equalTo(1L)); + } + } + public void testSingleValuedFieldOrderedByKeyAsc() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(dateHistogram("histo") diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java index a48facc4d66..0c8b9a22c37 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java @@ -52,9 +52,6 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.core.IsNull.notNullValue; -/** - * - */ @ESIntegTestCase.SuiteScopeTestCase public class FilterIT extends ESIntegTestCase { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java index a95df3ff5e6..592861ccce2 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java @@ -58,9 +58,6 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.core.IsNull.notNullValue; -/** - * - */ @ESIntegTestCase.SuiteScopeTestCase public class FiltersIT extends ESIntegTestCase { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java index 8dc015b30ed..59aa7544d10 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java @@ -106,7 +106,7 @@ public class SignificanceHeuristicTests extends ESTestCase { 
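The DateHistogramIT addition above renders day buckets in a +01:00 zone as epoch keys, dividing by 1000 when the format is epoch_second. A minimal sketch of the key arithmetic its assertions rely on, with the same Joda-Time types (the instant is the test's first expected key; the divider mirrors its millisDivider):

    import org.joda.time.DateTime;
    import org.joda.time.DateTimeZone;

    public class EpochKeySketch {
        public static void main(String[] args) {
            DateTime key = new DateTime(2012, 1, 1, 23, 0, DateTimeZone.UTC); // bucket key instant
            long millisDivider = 1000; // 1 for "epoch_millis", 1000 for "epoch_second"
            // what bucket.getKeyAsString() is compared against in the test
            System.out.println(Long.toString(key.getMillis() / millisDivider));
        }
    }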
ByteArrayInputStream inBuffer = new ByteArrayInputStream(outBuffer.toByteArray()); StreamInput in = new InputStreamStreamInput(inBuffer); NamedWriteableRegistry registry = new NamedWriteableRegistry(); - new SearchModule(Settings.EMPTY, registry); // populates the registry through side effects + new SearchModule(Settings.EMPTY, registry, false); // populates the registry through side effects in = new NamedWriteableAwareStreamInput(in, registry); in.setVersion(version); sigTerms[1].readFrom(in); @@ -202,7 +202,7 @@ public class SignificanceHeuristicTests extends ESTestCase { // 1. The output of the builders can actually be parsed // 2. The parser does not swallow parameters after a significance heuristic was defined public void testBuilderAndParser() throws Exception { - SearchModule searchModule = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry()); + SearchModule searchModule = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false); ParseFieldRegistry heuristicParserMapper = searchModule.getSignificanceHeuristicParserRegistry(); SearchContext searchContext = new SignificantTermsTestSearchContext(); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethodTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethodTests.java index 36c4caae12d..97d5cf1f9ee 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethodTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethodTests.java @@ -38,14 +38,14 @@ public class PercentilesMethodTests extends ESTestCase { public void testwriteTo() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { PercentilesMethod.TDIGEST.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(0)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { PercentilesMethod.HDR.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(1)); } } @@ -54,13 +54,13 @@ public class PercentilesMethodTests extends ESTestCase { public void testReadFrom() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(0); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(PercentilesMethod.readFromStream(in), equalTo(PercentilesMethod.TDIGEST)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(1); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(PercentilesMethod.readFromStream(in), equalTo(PercentilesMethod.HDR)); } } @@ -69,7 +69,7 @@ public class PercentilesMethodTests extends ESTestCase { public void testInvalidReadFrom() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(randomIntBetween(2, Integer.MAX_VALUE)); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { PercentilesMethod.readFromStream(in); fail("Expected IOException"); } catch(IOException e) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java 
b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java index e1441b0dc54..390501d2002 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregationBuilder; import static org.hamcrest.Matchers.equalTo; @@ -51,7 +50,7 @@ public class ExtendedStatsBucketTests extends AbstractBucketMetricsTestCase SearchSourceBuilder.fromXContent(createParseContext(parser), aggParsers, suggesters)); + assertThat(e, hasToString(containsString("unit is missing or unrecognized"))); + } + } + public void testEmptyPostFilter() throws IOException { SearchSourceBuilder builder = new SearchSourceBuilder(); String query = "{ \"post_filter\": {} }"; diff --git a/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java b/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java index 39df053c862..b8e572b9dcc 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java @@ -105,7 +105,7 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase { } } - public final static class TermVectorsFetchSubPhase implements FetchSubPhase { + public static final class TermVectorsFetchSubPhase implements FetchSubPhase { public static final ContextFactory CONTEXT_FACTORY = new ContextFactory() { diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java index 1418f476e00..155363f72a7 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java @@ -594,9 +594,9 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source( searchSource().query( functionScoreQuery(QueryBuilders.matchAllQuery(), new FilterFunctionBuilder[]{ - new FilterFunctionBuilder(linearDecayFunction("num1", null, "1000w")), + new FilterFunctionBuilder(linearDecayFunction("num1", null, "7000d")), new FilterFunctionBuilder(gaussDecayFunction("num1", null, "1d")), - new FilterFunctionBuilder(exponentialDecayFunction("num1", null, "1000w")) + new FilterFunctionBuilder(exponentialDecayFunction("num1", null, "7000d")) }).scoreMode(FiltersFunctionScoreQuery.ScoreMode.MULTIPLY)))); SearchResponse sr = response.actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index 14d0fc959c3..4ba1b902fef 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ 
b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -355,7 +355,7 @@ public class QueryRescorerIT extends ESIntegTestCase { // and shard id are equal during merging shard results. // This comparator uses a custom tie in case the scores are equal, so that both regular hits and rescored hits // are sorted equally. This is fine since tests only care about the fact the scores should be equal, not ordering. - private final static Comparator<SearchHit> searchHitsComparator = new Comparator<SearchHit>() { + private static final Comparator<SearchHit> searchHitsComparator = new Comparator<SearchHit>() { @Override public int compare(SearchHit hit1, SearchHit hit2) { int cmp = Float.compare(hit2.getScore(), hit1.getScore()); diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java index 7da18342edf..067c2dcac05 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoShapeQueryTests.java @@ -19,6 +19,8 @@ package org.elasticsearch.search.geo; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.settings.Settings; import org.locationtech.spatial4j.shape.Rectangle; import com.vividsolutions.jts.geom.Coordinate; @@ -54,6 +56,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSear import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.nullValue; public class GeoShapeQueryTests extends ESSingleNodeTestCase { @@ -197,6 +200,30 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase { assertThat(searchResponse.getHits().getAt(0).id(), equalTo("1")); } + public void testIndexedShapeReferenceSourceDisabled() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject() + .startObject("properties") + .startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .endObject() + .endObject() + .endObject(); + client().admin().indices().prepareCreate("test").addMapping("type1", mapping).get(); + createIndex("shapes", Settings.EMPTY, "shape_type", "_source", "enabled=false"); + ensureGreen(); + + ShapeBuilder shape = ShapeBuilders.newEnvelope(new Coordinate(-45, 45), new Coordinate(45, -45)); + + client().prepareIndex("shapes", "shape_type", "Big_Rectangle").setSource(jsonBuilder().startObject() + .field("shape", shape).endObject()).setRefreshPolicy(IMMEDIATE).get(); + + ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> client().prepareSearch("test").setTypes("type1") + .setQuery(geoIntersectionQuery("location", "Big_Rectangle", "shape_type")).get()); + assertThat(e.getRootCause(), instanceOf(IllegalArgumentException.class)); + assertThat(e.getRootCause().getMessage(), containsString("source disabled")); + } + public void testReusableBuilder() throws IOException { ShapeBuilder polygon = ShapeBuilders.newPolygon(new CoordinatesBuilder() .coordinate(170, -10).coordinate(190, -10).coordinate(190, 10).coordinate(170, 10).close()) diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java index 9ebbb5b42e0..1e3c5453fd7 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java +++ 
b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java @@ -83,7 +83,7 @@ public class HighlightBuilderTests extends ESTestCase { @BeforeClass public static void init() { namedWriteableRegistry = new NamedWriteableRegistry(); - indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).getQueryParserRegistry(); + indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false).getQueryParserRegistry(); } @AfterClass @@ -484,14 +484,14 @@ public class HighlightBuilderTests extends ESTestCase { public void testOrderSerialization() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { Order.NONE.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(0)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { Order.SCORE.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(1)); } } @@ -738,7 +738,7 @@ public class HighlightBuilderTests extends ESTestCase { private static HighlightBuilder serializedCopy(HighlightBuilder original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { original.writeTo(output); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { return new HighlightBuilder(in); } } diff --git a/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java b/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java index 77fc2f0e6a9..dedd47d3e43 100644 --- a/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java +++ b/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java @@ -67,7 +67,7 @@ public class InternalSearchHitTests extends ESTestCase { context.streamShardTarget(InternalSearchHits.StreamContext.ShardTargetType.STREAM); BytesStreamOutput output = new BytesStreamOutput(); hits.writeTo(output, context); - InputStream input = new ByteArrayInputStream(output.bytes().toBytes()); + InputStream input = output.bytes().streamInput(); context = new InternalSearchHits.StreamContext(); context.streamShardTarget(InternalSearchHits.StreamContext.ShardTargetType.STREAM); InternalSearchHits results = InternalSearchHits.readSearchHits(new InputStreamStreamInput(input), context); diff --git a/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java b/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java index ef7a4ecc7ce..300c4f141b0 100644 --- a/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java +++ b/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java @@ -362,8 +362,8 @@ public class MatchedQueriesIT extends ESIntegTestCase { refresh(); QueryBuilder[] queries = new QueryBuilder[]{ - wrapperQuery(matchQuery("content", "amet").queryName("abc").buildAsBytes().toUtf8()), - constantScoreQuery(wrapperQuery(termQuery("content", "amet").queryName("abc").buildAsBytes().toUtf8())) + wrapperQuery(matchQuery("content", "amet").queryName("abc").buildAsBytes().utf8ToString()), + constantScoreQuery(wrapperQuery(termQuery("content", "amet").queryName("abc").buildAsBytes().utf8ToString())) }; for 
(QueryBuilder query : queries) { SearchResponse searchResponse = client().prepareSearch() diff --git a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index 875256a0f92..766aff8d274 100644 --- a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -623,7 +623,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase { assertFirstHit(searchResponse, hasId("ultimate1")); } - private static final void assertEquivalent(String query, SearchResponse left, SearchResponse right) { + private static void assertEquivalent(String query, SearchResponse left, SearchResponse right) { assertNoFailures(left); assertNoFailures(right); SearchHits leftHits = left.getHits(); diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index 8cb21dadb9b..1d54b412d6c 100644 --- a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -1148,6 +1148,7 @@ public class SearchQueryIT extends ESIntegTestCase { jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("arr").startObject("properties").startObject("term").field("type", "text") .endObject().endObject().endObject().endObject().endObject().endObject())); + assertAcked(prepareCreate("lookup3").addMapping("type", "_source", "enabled=false", "terms","type=text")); assertAcked(prepareCreate("test").addMapping("type", "term", "type=text")); indexRandom(true, @@ -1172,6 +1173,7 @@ public class SearchQueryIT extends ESIntegTestCase { .startObject().field("term", "4").endObject() .endArray() .endObject()), + client().prepareIndex("lookup3", "type", "1").setSource("terms", new String[]{"1", "3"}), client().prepareIndex("test", "type", "1").setSource("term", "1"), client().prepareIndex("test", "type", "2").setSource("term", "2"), client().prepareIndex("test", "type", "3").setSource("term", "3"), @@ -1227,6 +1229,16 @@ public class SearchQueryIT extends ESIntegTestCase { searchResponse = client().prepareSearch("test") .setQuery(termsLookupQuery("not_exists", new TermsLookup("lookup2", "type", "3", "arr.term"))).get(); assertHitCount(searchResponse, 0L); + + // index "lookup" type "type" id "missing" document does not exist: ignore the lookup terms + searchResponse = client().prepareSearch("test") + .setQuery(termsLookupQuery("term" , new TermsLookup("lookup", "type", "missing", "terms"))).get(); + assertHitCount(searchResponse, 0L); + + // index "lookup3" type "type" has the source disabled: ignore the lookup terms + searchResponse = client().prepareSearch("test") + .setQuery(termsLookupQuery("term" , new TermsLookup("lookup3", "type", "1", "terms"))).get(); + assertHitCount(searchResponse, 0L); } public void testBasicQueryById() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java index f965d3ac5fd..28eb56bdcaf 100644 --- a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java @@ -70,7 +70,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { @BeforeClass public static void init() { 
namedWriteableRegistry = new NamedWriteableRegistry(); - indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).getQueryParserRegistry(); + indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false).getQueryParserRegistry(); } @AfterClass @@ -340,7 +340,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { private static RescoreBuilder<?> serializedCopy(RescoreBuilder<?> original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { output.writeNamedWriteable(original); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { return in.readNamedWriteable(RescoreBuilder.class); } } diff --git a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java index 3c675926328..88af1ddf2b6 100644 --- a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java @@ -65,7 +65,7 @@ public class SearchAfterBuilderTests extends ESTestCase { indicesQueriesRegistry = null; } - private final SearchAfterBuilder randomSearchFromBuilder() throws IOException { + private SearchAfterBuilder randomSearchFromBuilder() throws IOException { int numSearchFrom = randomIntBetween(1, 10); SearchAfterBuilder searchAfterBuilder = new SearchAfterBuilder(); Object[] values = new Object[numSearchFrom]; @@ -112,7 +112,7 @@ public class SearchAfterBuilderTests extends ESTestCase { // ensure that every number type remain the same before/after xcontent (de)serialization. // This is not a problem because the final type of each field value is extracted from associated sort field. // This little trick ensures that equals and hashcode are the same when using the xcontent serialization. 
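The serializedCopy helpers in these builder tests all share one pattern: write the builder as a named writeable, then read it back through a NamedWriteableAwareStreamInput so the registry (populated by SearchModule as a side effect) can resolve the reader. A generic sketch of that copy idiom, assuming the non-generic NamedWriteable interface of this branch (the class and helper names are illustrative):

    import java.io.IOException;
    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.NamedWriteable;
    import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
    import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
    import org.elasticsearch.common.io.stream.StreamInput;

    public class NamedWriteableCopySketch {
        // Round-trip a named writeable through the registry-aware stream pair used by these tests.
        static <T extends NamedWriteable> T copy(T original, NamedWriteableRegistry registry, Class<T> clazz) throws IOException {
            try (BytesStreamOutput output = new BytesStreamOutput()) {
                output.writeNamedWriteable(original);
                try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), registry)) {
                    return in.readNamedWriteable(clazz); // resolves the reader registered for clazz
                }
            }
        }
    }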
-    private final SearchAfterBuilder randomJsonSearchFromBuilder() throws IOException {
+    private SearchAfterBuilder randomJsonSearchFromBuilder() throws IOException {
         int numSearchAfter = randomIntBetween(1, 10);
         XContentBuilder jsonBuilder = XContentFactory.jsonBuilder();
         jsonBuilder.startObject();
@@ -164,7 +164,7 @@ public class SearchAfterBuilderTests extends ESTestCase {
     private static SearchAfterBuilder serializedCopy(SearchAfterBuilder original) throws IOException {
         try (BytesStreamOutput output = new BytesStreamOutput()) {
             original.writeTo(output);
-            try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
+            try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) {
                 return new SearchAfterBuilder(in);
             }
         }
diff --git a/core/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java b/core/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java
index 217f97ace0a..668351c6db9 100644
--- a/core/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java
@@ -88,7 +88,7 @@ public class SliceBuilderTests extends ESTestCase {
         indicesQueriesRegistry = null;
     }
 
-    private final SliceBuilder randomSliceBuilder() throws IOException {
+    private SliceBuilder randomSliceBuilder() throws IOException {
         int max = randomIntBetween(2, MAX_SLICE);
         int id = randomInt(max - 1);
         String field = randomAsciiOfLengthBetween(5, 20);
@@ -99,7 +99,7 @@
         try (BytesStreamOutput output = new BytesStreamOutput()) {
             original.writeTo(output);
             try (StreamInput in =
-                    new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
+                    new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) {
                 return new SliceBuilder(in);
             }
         }
diff --git a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java
index 2ac7fd68e2e..8d46372aa42 100644
--- a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java
+++ b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java
@@ -100,13 +100,13 @@ public abstract class AbstractSortTestCase<T extends SortBuilder<T>> extends EST
         scriptService = new ScriptService(baseSettings, environment, new ResourceWatcherService(baseSettings, null),
                 scriptEngineRegistry, scriptContextRegistry, scriptSettings) {
             @Override
-            public CompiledScript compile(Script script, ScriptContext scriptContext, Map<String, Object> params, ClusterState state) {
+            public CompiledScript compile(Script script, ScriptContext scriptContext, Map<String, Object> params) {
                 return new CompiledScript(ScriptType.INLINE, "mockName", "test", script);
             }
         };
 
         namedWriteableRegistry = new NamedWriteableRegistry();
-        indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).getQueryParserRegistry();
+        indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false).getQueryParserRegistry();
     }
 
     @AfterClass
@@ -272,7 +272,7 @@ public abstract class AbstractSortTestCase<T extends SortBuilder<T>> extends EST
     private T copyItem(T original) throws IOException {
         try (BytesStreamOutput output = new BytesStreamOutput()) {
             original.writeTo(output);
-            try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
+            try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) {
                 return (T) namedWriteableRegistry.getReader(SortBuilder.class, original.getWriteableName()).read(in);
             }
         }
diff --git a/core/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java
index f31158ff34e..54b12216302 100644
--- a/core/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java
@@ -52,7 +52,7 @@ public class SortBuilderTests extends ESTestCase {
     @BeforeClass
     public static void init() {
         namedWriteableRegistry = new NamedWriteableRegistry();
-        indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).getQueryParserRegistry();
+        indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false).getQueryParserRegistry();
     }
 
     @AfterClass
diff --git a/core/src/test/java/org/elasticsearch/search/sort/SortOrderTests.java b/core/src/test/java/org/elasticsearch/search/sort/SortOrderTests.java
index 2de48decbd8..208b4ed1b53 100644
--- a/core/src/test/java/org/elasticsearch/search/sort/SortOrderTests.java
+++ b/core/src/test/java/org/elasticsearch/search/sort/SortOrderTests.java
@@ -37,7 +37,7 @@ public class SortOrderTests extends ESTestCase {
         for (SortOrder unit : SortOrder.values()) {
             try (BytesStreamOutput out = new BytesStreamOutput()) {
                 unit.writeTo(out);
-                try (StreamInput in = StreamInput.wrap(out.bytes())) {
+                try (StreamInput in = out.bytes().streamInput()) {
                     assertThat("Roundtrip serialisation failed.", SortOrder.readFromStream(in), equalTo(unit));
                 }
             }
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java
index 1f4030f487c..b67036e1152 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java
@@ -56,7 +56,7 @@ public abstract class AbstractSuggestionBuilderTestCase extends ESTestCase {
     private M copyModel(M original) throws IOException {
         try (BytesStreamOutput output = new BytesStreamOutput()) {
             original.writeTo(output);
-            try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), provideNamedWritableRegistry())) {
+            try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), provideNamedWritableRegistry())) {
                 return readFrom(in);
             }
         }
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java
index 3fd3850b98a..846d3193f6d 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java
@@ -206,7 +206,7 @@ public class DirectCandidateGeneratorTests extends ESTestCase{
     private static DirectCandidateGeneratorBuilder serializedCopy(DirectCandidateGeneratorBuilder original) throws IOException {
         try (BytesStreamOutput output = new BytesStreamOutput()) {
             original.writeTo(output);
-            try (StreamInput in = StreamInput.wrap(output.bytes())) {
+            try (StreamInput in = output.bytes().streamInput()) {
                 return new DirectCandidateGeneratorBuilder(in);
             }
         }
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java
index f167eefa43d..c7b883b583b 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java
@@ -180,7 +180,7 @@ public abstract class SmoothingModelTestCase extends ESTestCase {
     static SmoothingModel copyModel(SmoothingModel original) throws IOException {
         try (BytesStreamOutput output = new BytesStreamOutput()) {
             original.writeTo(output);
-            try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
+            try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) {
                 return namedWriteableRegistry.getReader(SmoothingModel.class, original.getWriteableName()).read(in);
             }
         }
diff --git a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java
index 50fb3f9074b..bbd1bec1d45 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java
@@ -279,8 +279,8 @@ public abstract class AbstractSnapshotIntegTestCase extends ESIntegTestCase {
             }
 
             @Override
-            public void onFailure(String source, Throwable t) {
-                logger.warn("failed to execute [{}]", t, source);
+            public void onFailure(String source, Exception e) {
+                logger.warn("failed to execute [{}]", e, source);
             }
         });
diff --git a/core/src/test/java/org/elasticsearch/snapshots/BlobStoreFormatIT.java b/core/src/test/java/org/elasticsearch/snapshots/BlobStoreFormatIT.java
index b2b9e780205..e1589b4cd2f 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/BlobStoreFormatIT.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/BlobStoreFormatIT.java
@@ -138,7 +138,7 @@ public class BlobStoreFormatIT extends AbstractSnapshotIntegTestCase {
     private <T extends ToXContent> BytesReference write(T obj) throws IOException {
         try (BytesStreamOutput bytesStreamOutput = new BytesStreamOutput()) {
             if (compress) {
-                try (StreamOutput compressedStreamOutput = CompressorFactory.defaultCompressor().streamOutput(bytesStreamOutput)) {
+                try (StreamOutput compressedStreamOutput = CompressorFactory.COMPRESSOR.streamOutput(bytesStreamOutput)) {
                     write(obj, compressedStreamOutput);
                 }
             } else {
diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java
index 3ba6c875b68..9d571c02c90 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java
@@ -31,6 +31,7 @@ import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus;
 import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.client.Client;
+import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.ClusterStateUpdateTask;
 import org.elasticsearch.cluster.metadata.MetaData;
@@ -271,7 +272,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest
         }
 
         @Override
-        public void onFailure(String source, @Nullable Throwable t) {
+        public void onFailure(String source, @Nullable Exception e) {
             countDownLatch.countDown();
         }
 
@@ -283,8 +284,8 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest
         countDownLatch.await();
     }
 
-    private static interface ClusterStateUpdater {
-        public ClusterState execute(ClusterState currentState) throws Exception;
+    private interface ClusterStateUpdater {
+        ClusterState execute(ClusterState currentState) throws Exception;
     }
 
     public void testSnapshotDuringNodeShutdown() throws Exception {
@@ -391,8 +392,11 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest
         logger.info("--> making sure that snapshot no longer exists");
         assertThrows(client().admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap").execute(), SnapshotMissingException.class);
-        // Subtract index file from the count
-        assertThat("not all files were deleted during snapshot cancellation", numberOfFilesBeforeSnapshot, equalTo(numberOfFiles(repo) - 1));
+        // Subtract three files that will remain in the repository:
+        //   (1) index-1
+        //   (2) index-0 (because we keep the previous version) and
+        //   (3) index-latest
+        assertThat("not all files were deleted during snapshot cancellation", numberOfFilesBeforeSnapshot, equalTo(numberOfFiles(repo) - 3));
         logger.info("--> done");
     }
 
@@ -635,6 +639,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest
                 .put(MockRepository.Plugin.PASSWORD_SETTING.getKey(), "verysecretpassword")
         ).get();
 
+        NodeClient nodeClient = internalCluster().getInstance(NodeClient.class);
         RestGetRepositoriesAction getRepoAction = internalCluster().getInstance(RestGetRepositoriesAction.class);
         RestRequest getRepoRequest = new FakeRestRequest();
         getRepoRequest.params().put("repository", "test-repo");
@@ -644,14 +649,14 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest
             @Override
             public void sendResponse(RestResponse response) {
                 try {
-                    assertThat(response.content().toUtf8(), containsString("notsecretusername"));
-                    assertThat(response.content().toUtf8(), not(containsString("verysecretpassword")));
+                    assertThat(response.content().utf8ToString(), containsString("notsecretusername"));
+                    assertThat(response.content().utf8ToString(), not(containsString("verysecretpassword")));
                 } catch (AssertionError ex) {
                     getRepoError.set(ex);
                 }
                 getRepoLatch.countDown();
             }
-        });
+        }, nodeClient);
         assertTrue(getRepoLatch.await(1, TimeUnit.SECONDS));
         if (getRepoError.get() != null) {
             throw getRepoError.get();
@@ -665,14 +670,14 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest
             @Override
             public void sendResponse(RestResponse response) {
                 try {
-                    assertThat(response.content().toUtf8(), containsString("notsecretusername"));
-                    assertThat(response.content().toUtf8(), not(containsString("verysecretpassword")));
+                    assertThat(response.content().utf8ToString(), containsString("notsecretusername"));
+                    assertThat(response.content().utf8ToString(), not(containsString("verysecretpassword")));
                 } catch (AssertionError ex) {
                     clusterStateError.set(ex);
                 }
                 clusterStateLatch.countDown();
             }
-        });
+        }, nodeClient);
         assertTrue(clusterStateLatch.await(1, TimeUnit.SECONDS));
         if (clusterStateError.get() != null) {
             throw clusterStateError.get();
diff --git a/core/src/test/java/org/elasticsearch/snapshots/RepositoriesIT.java b/core/src/test/java/org/elasticsearch/snapshots/RepositoriesIT.java
index 79d1497912a..48a1cc6081e 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/RepositoriesIT.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/RepositoriesIT.java
@@ -40,6 +40,7 @@ import java.util.List;
 
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows;
 import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.either;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.notNullValue;
@@ -153,7 +154,9 @@ public class RepositoriesIT extends AbstractSnapshotIntegTestCase {
                     .get();
             fail("Shouldn't be here");
         } catch (RepositoryException ex) {
-            assertThat(ex.toString(), containsString("unsupported url protocol [netdoc]"));
+            assertThat(ex.toString(),
+                either(containsString("unsupported url protocol [netdoc]"))
+                    .or(containsString("unknown protocol: netdoc"))); // newer versions of JDK 9
         }
 
         logger.info("--> trying creating url repository with location that is not registered in path.repo setting");
diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
index 19b46710fea..f6049002852 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
@@ -818,8 +818,9 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
         logger.info("--> delete the last snapshot");
         client.admin().cluster().prepareDeleteSnapshot("test-repo", lastSnapshot).get();
-        logger.info("--> make sure that number of files is back to what it was when the first snapshot was made");
-        assertThat(numberOfFiles(repo), equalTo(numberOfFiles[0]));
+        logger.info("--> make sure that number of files is back to what it was when the first snapshot was made, " +
+                    "plus one because one backup index-N file should remain");
+        assertThat(numberOfFiles(repo), equalTo(numberOfFiles[0] + 1));
     }
 
     public void testDeleteSnapshotWithMissingIndexAndShardMetadata() throws Exception {
@@ -2041,7 +2042,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas
             }
 
             @Override
-            public void onFailure(String source, Throwable t) {
+            public void onFailure(String source, Exception e) {
                 fail();
             }
diff --git a/core/src/test/java/org/elasticsearch/snapshots/SnapshotRequestsTests.java b/core/src/test/java/org/elasticsearch/snapshots/SnapshotRequestsTests.java
index 38d858c49aa..c178b2a6f83 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/SnapshotRequestsTests.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/SnapshotRequestsTests.java
@@ -22,6 +22,7 @@ package org.elasticsearch.snapshots;
 import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
 import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest;
 import org.elasticsearch.action.support.IndicesOptions;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.test.ESTestCase;
 
@@ -75,7 +76,7 @@ public class SnapshotRequestsTests extends ESTestCase {
             builder.endArray();
         }
 
-        byte[] bytes = builder.endObject().bytes().toBytes();
+        byte[] bytes = BytesReference.toBytes(builder.endObject().bytes());
 
         request.source(bytes);
 
@@ -134,7 +135,7 @@ public class SnapshotRequestsTests extends ESTestCase {
             builder.endArray();
         }
 
-        byte[] bytes = builder.endObject().bytes().toBytes();
+        byte[] bytes = BytesReference.toBytes(builder.endObject().bytes());
 
         request.source(bytes);
diff --git a/core/src/test/java/org/elasticsearch/snapshots/SnapshotTests.java b/core/src/test/java/org/elasticsearch/snapshots/SnapshotTests.java
index cb297785e4b..41cfa3d4141 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/SnapshotTests.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/SnapshotTests.java
@@ -20,12 +20,10 @@
 package org.elasticsearch.snapshots;
 
 import org.elasticsearch.common.UUIDs;
-import org.elasticsearch.common.io.stream.ByteBufferStreamInput;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
-import java.nio.ByteBuffer;
 
 import static org.hamcrest.CoreMatchers.equalTo;
@@ -50,8 +48,7 @@ public class SnapshotTests extends ESTestCase {
         final Snapshot original = new Snapshot(randomAsciiOfLength(randomIntBetween(2, 8)), snapshotId);
         final BytesStreamOutput out = new BytesStreamOutput();
         original.writeTo(out);
-        final ByteBufferStreamInput in = new ByteBufferStreamInput(ByteBuffer.wrap(out.bytes().toBytes()));
-        assertThat(new Snapshot(in), equalTo(original));
+        assertThat(new Snapshot(out.bytes().streamInput()), equalTo(original));
     }
 }
diff --git a/core/src/test/java/org/elasticsearch/tasks/PersistedTaskInfoTests.java b/core/src/test/java/org/elasticsearch/tasks/PersistedTaskInfoTests.java
index bfbb2dff4c7..5b507436129 100644
--- a/core/src/test/java/org/elasticsearch/tasks/PersistedTaskInfoTests.java
+++ b/core/src/test/java/org/elasticsearch/tasks/PersistedTaskInfoTests.java
@@ -37,7 +37,7 @@ import java.util.Map;
 import java.util.TreeMap;
 
 /**
- * Round trip tests for {@link PersistedTaskInfo} and those classes that it includes like {@link TaskInfo} and {@link RawTaskStatus}. 
+ * Round trip tests for {@link PersistedTaskInfo} and those classes that it includes like {@link TaskInfo} and {@link RawTaskStatus}.
  */
 public class PersistedTaskInfoTests extends ESTestCase {
     public void testBinaryRoundTrip() throws IOException {
@@ -47,7 +47,7 @@ public class PersistedTaskInfoTests extends ESTestCase {
         PersistedTaskInfo read;
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             result.writeTo(out);
-            try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes()), registry)) {
+            try (StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry)) {
                 read = new PersistedTaskInfo(in);
             }
         } catch (IOException e) {
diff --git a/core/src/test/java/org/elasticsearch/tasks/TaskIdTests.java b/core/src/test/java/org/elasticsearch/tasks/TaskIdTests.java
index b13de26b976..f7990cfacb7 100644
--- a/core/src/test/java/org/elasticsearch/tasks/TaskIdTests.java
+++ b/core/src/test/java/org/elasticsearch/tasks/TaskIdTests.java
@@ -57,7 +57,7 @@ public class TaskIdTests extends ESTestCase {
         taskId.writeTo(out);
         BytesReference bytes = out.bytes();
         assertEquals(expectedSize, bytes.length());
-        try (StreamInput in = StreamInput.wrap(bytes)) {
+        try (StreamInput in = bytes.streamInput()) {
             return TaskId.readFromStream(in);
         }
     }
diff --git a/core/src/test/java/org/elasticsearch/test/MockLogAppender.java b/core/src/test/java/org/elasticsearch/test/MockLogAppender.java
index 9e4a881b25b..8f10ccd6537 100644
--- a/core/src/test/java/org/elasticsearch/test/MockLogAppender.java
+++ b/core/src/test/java/org/elasticsearch/test/MockLogAppender.java
@@ -34,7 +34,7 @@ import static org.hamcrest.MatcherAssert.assertThat;
  */
 public class MockLogAppender extends AppenderSkeleton {
 
-    private final static String COMMON_PREFIX = System.getProperty("es.logger.prefix", "org.elasticsearch.");
+    private static final String COMMON_PREFIX = System.getProperty("es.logger.prefix", "org.elasticsearch.");
 
     private List<LoggingExpectation> expectations;
 
@@ -75,7 +75,7 @@ public class MockLogAppender extends AppenderSkeleton {
         void assertMatched();
     }
 
-    public static abstract class AbstractEventExpectation implements LoggingExpectation {
+    public abstract static class AbstractEventExpectation implements LoggingExpectation {
         protected final String name;
         protected final String logger;
         protected final Level level;
diff --git a/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java b/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java
index 3193aaf458e..c36082f1475 100644
--- a/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java
+++ b/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java
@@ -20,7 +20,7 @@ package org.elasticsearch.test;
 
 import org.elasticsearch.cluster.ClusterChangedEvent;
 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.cluster.routing.RoutingService;
+import org.elasticsearch.cluster.routing.allocation.AllocationService;
 import org.elasticsearch.common.component.Lifecycle;
 import org.elasticsearch.common.component.LifecycleListener;
 import org.elasticsearch.discovery.Discovery;
@@ -41,7 +41,7 @@ public class NoopDiscovery implements Discovery {
     }
 
     @Override
-    public void setRoutingService(RoutingService routingService) {
+    public void setAllocationService(AllocationService allocationService) {
 
     }
 
@@ -86,17 +86,11 @@ public class NoopDiscovery implements Discovery {
     }
 
     @Override
-    public Discovery start() {
-        return null;
-    }
+    public void start() {}
 
     @Override
-    public Discovery stop() {
-        return null;
-    }
+    public void stop() {}
 
     @Override
-    public void close() {
-
-    }
+    public void close() {}
 }
diff --git a/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java b/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java
index 897fa44b593..20c82e6f518 100644
--- a/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java
+++ b/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java
@@ -20,16 +20,9 @@
 package org.elasticsearch.test.geo;
 
 import com.carrotsearch.randomizedtesting.generators.RandomInts;
-import org.locationtech.spatial4j.context.jts.JtsSpatialContext;
-import org.locationtech.spatial4j.distance.DistanceUtils;
-import org.locationtech.spatial4j.exception.InvalidShapeException;
-import org.locationtech.spatial4j.shape.Point;
-import org.locationtech.spatial4j.shape.Rectangle;
-import org.locationtech.spatial4j.shape.impl.Range;
 import com.vividsolutions.jts.algorithm.ConvexHull;
 import com.vividsolutions.jts.geom.Coordinate;
 import com.vividsolutions.jts.geom.Geometry;
-
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.geo.builders.CoordinateCollection;
 import org.elasticsearch.common.geo.builders.CoordinatesBuilder;
@@ -42,6 +35,12 @@ import org.elasticsearch.common.geo.builders.PolygonBuilder;
 import org.elasticsearch.common.geo.builders.ShapeBuilder;
 import org.elasticsearch.search.geo.GeoShapeQueryTests;
 import org.junit.Assert;
+import org.locationtech.spatial4j.context.jts.JtsSpatialContext;
+import org.locationtech.spatial4j.distance.DistanceUtils;
+import org.locationtech.spatial4j.exception.InvalidShapeException;
+import org.locationtech.spatial4j.shape.Point;
+import org.locationtech.spatial4j.shape.Rectangle;
+import org.locationtech.spatial4j.shape.impl.Range;
 
 import java.util.Random;
 
@@ -230,14 +229,10 @@ public class RandomShapeGenerator extends RandomGeoGenerator {
             // The validate flag will check for these possibilities and bail if an incorrect geometry is created
             try {
                 pgb.build();
-            } catch (Throwable e) {
+            } catch (AssertionError | InvalidShapeException e) {
                 // jts bug may occasionally misinterpret coordinate order causing an unhelpful ('geom' assertion)
                 // or InvalidShapeException
-                if (e instanceof InvalidShapeException || e instanceof AssertionError) {
-                    return null;
-                }
-                // throw any other exception
-                throw e;
+                return null;
             }
         }
         return pgb;
diff --git a/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java b/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java
index 28267e9beb7..974929dddf2 100644
--- a/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java
+++ b/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java
@@ -19,8 +19,6 @@
 
 package org.elasticsearch.threadpool;
 
-import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
-import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ToXContent;
@@ -36,7 +34,6 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import org.elasticsearch.test.ESIntegTestCase.Scope;
 import org.elasticsearch.test.InternalTestCluster;
 import org.elasticsearch.test.hamcrest.RegexMatcher;
-import org.elasticsearch.threadpool.ThreadPool.Names;
 import org.elasticsearch.tribe.TribeIT;
 
 import java.io.IOException;
@@ -46,19 +43,11 @@ import java.lang.management.ThreadMXBean;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.BrokenBarrierException;
-import java.util.concurrent.CyclicBarrier;
-import java.util.concurrent.Executor;
-import java.util.concurrent.ThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
 import java.util.regex.Pattern;
 
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
 import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.not;
-import static org.hamcrest.Matchers.sameInstance;
 
 /**
  */
@@ -136,9 +125,9 @@ public class SimpleThreadPoolIT extends ESIntegTestCase {
         try {
             new Node(settings);
             fail("The node startup is supposed to fail");
-        } catch(Throwable t) {
+        } catch(Exception e) {
             //all good
-            assertThat(t.getMessage(), containsString("mandatory plugins [non_existing]"));
+            assertThat(e.getMessage(), containsString("mandatory plugins [non_existing]"));
         }
     }
diff --git a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java
index 486b0635c64..14cf10b8f31 100644
--- a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java
+++ b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java
@@ -58,7 +58,7 @@ public class ThreadPoolSerializationTests extends ESTestCase {
         output.setVersion(Version.CURRENT);
         info.writeTo(output);
 
-        StreamInput input = StreamInput.wrap(output.bytes());
+        StreamInput input = output.bytes().streamInput();
         ThreadPool.Info newInfo = new ThreadPool.Info();
         newInfo.readFrom(input);
 
@@ -70,7 +70,7 @@ public class ThreadPoolSerializationTests extends ESTestCase {
         output.setVersion(Version.CURRENT);
         info.writeTo(output);
 
-        StreamInput input = StreamInput.wrap(output.bytes());
+        StreamInput input = output.bytes().streamInput();
         ThreadPool.Info newInfo = new ThreadPool.Info();
         newInfo.readFrom(input);
 
@@ -125,7 +125,7 @@ public class ThreadPoolSerializationTests extends ESTestCase {
         output.setVersion(Version.CURRENT);
         info.writeTo(output);
 
-        StreamInput input = StreamInput.wrap(output.bytes());
+        StreamInput input = output.bytes().streamInput();
         ThreadPool.Info newInfo = new ThreadPool.Info();
         newInfo.readFrom(input);
diff --git a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
index 4fa99b3b80c..9ae029a4aa4 100644
--- a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
+++ b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
@@ -324,16 +324,13 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
 
     public void testVoidMessageCompressed() {
         serviceA.registerRequestHandler("sayHello", TransportRequest.Empty::new, ThreadPool.Names.GENERIC,
-                new TransportRequestHandler<TransportRequest.Empty>() {
-                    @Override
-                    public void messageReceived(TransportRequest.Empty request, TransportChannel channel) {
-                        try {
-                            TransportResponseOptions responseOptions = TransportResponseOptions.builder().withCompress(true).build();
-                            channel.sendResponse(TransportResponse.Empty.INSTANCE, responseOptions);
-                        } catch (IOException e) {
-                            logger.error("Unexpected failure", e);
-                            fail(e.getMessage());
-                        }
+                (request, channel) -> {
+                    try {
+                        TransportResponseOptions responseOptions = TransportResponseOptions.builder().withCompress(true).build();
+                        channel.sendResponse(TransportResponse.Empty.INSTANCE, responseOptions);
+                    } catch (IOException e) {
+                        logger.error("Unexpected failure", e);
+                        fail(e.getMessage());
                     }
                 });
diff --git a/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java b/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java
index a5e2efb9f38..e9ec3787afc 100644
--- a/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java
+++ b/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java
@@ -21,7 +21,6 @@ package org.elasticsearch.transport;
 
 import org.apache.http.message.BasicHeader;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.ActionModule;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
@@ -45,6 +44,7 @@ import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.index.query.TermsQueryBuilder;
 import org.elasticsearch.indices.TermsLookup;
+import org.elasticsearch.plugins.ActionPlugin;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.test.ESIntegTestCase;
@@ -61,6 +61,7 @@ import java.util.Locale;
 import java.util.Map;
 import java.util.concurrent.CopyOnWriteArrayList;
 
+import static java.util.Collections.singletonList;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
@@ -284,15 +285,16 @@ public class ContextAndHeaderTransportIT extends ESIntegTestCase {
         return internalCluster().transportClient().filterWithHeader(Collections.singletonMap(randomHeaderKey, randomHeaderValue));
     }
 
-    public static class ActionLoggingPlugin extends Plugin {
+    public static class ActionLoggingPlugin extends Plugin implements ActionPlugin {
 
         @Override
         public Collection<Module> nodeModules() {
             return Collections.singletonList(new ActionLoggingModule());
         }
 
-        public void onModule(ActionModule module) {
-            module.registerFilter(LoggingFilter.class);
+        @Override
+        public List<Class<? extends ActionFilter>> getActionFilters() {
+            return singletonList(LoggingFilter.class);
         }
     }
diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyPublishPortTests.java b/core/src/test/java/org/elasticsearch/transport/PublishPortTests.java
similarity index 94%
rename from core/src/test/java/org/elasticsearch/transport/netty/NettyPublishPortTests.java
rename to core/src/test/java/org/elasticsearch/transport/PublishPortTests.java
index 6f602dafc99..ffe7a2d7ce2 100644
--- a/core/src/test/java/org/elasticsearch/transport/netty/NettyPublishPortTests.java
+++ b/core/src/test/java/org/elasticsearch/transport/PublishPortTests.java
@@ -17,13 +17,11 @@
  * under the License.
  */
 
-package org.elasticsearch.transport.netty;
+package org.elasticsearch.transport;
 
 import org.elasticsearch.common.network.NetworkUtils;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.transport.BindTransportException;
-import org.elasticsearch.transport.TransportSettings;
 
 import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
@@ -32,11 +30,11 @@ import java.util.List;
 
 import static java.net.InetAddress.getByName;
 import static java.util.Arrays.asList;
-import static org.elasticsearch.transport.netty.NettyTransport.resolvePublishPort;
+import static org.elasticsearch.transport.TcpTransport.resolvePublishPort;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 
-public class NettyPublishPortTests extends ESTestCase {
+public class PublishPortTests extends ESTestCase {
 
     public void testPublishPort() throws Exception {
         int boundPort = randomIntBetween(9000, 9100);
diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportTests.java b/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java
similarity index 78%
rename from core/src/test/java/org/elasticsearch/transport/netty/NettyTransportTests.java
rename to core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java
index a5bd6612cdf..da1dcf43e5d 100644
--- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportTests.java
+++ b/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java
@@ -17,17 +17,17 @@
  * under the License.
  */
 
-package org.elasticsearch.transport.netty;
+package org.elasticsearch.transport;
 
 import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.test.ESTestCase;
 
-/** Unit tests for NettyTransport */
-public class NettyTransportTests extends ESTestCase {
-
+/** Unit tests for TCPTransport */
+public class TCPTransportTests extends ESTestCase {
+
     /** Test ipv4 host with a default port works */
     public void testParseV4DefaultPort() throws Exception {
-        TransportAddress[] addresses = NettyTransport.parse("127.0.0.1", "1234", Integer.MAX_VALUE);
+        TransportAddress[] addresses = TcpTransport.parse("127.0.0.1", "1234", Integer.MAX_VALUE);
         assertEquals(1, addresses.length);
 
         assertEquals("127.0.0.1", addresses[0].getAddress());
@@ -36,19 +36,19 @@ public class NettyTransportTests extends ESTestCase {
 
     /** Test ipv4 host with a default port range works */
     public void testParseV4DefaultRange() throws Exception {
-        TransportAddress[] addresses = NettyTransport.parse("127.0.0.1", "1234-1235", Integer.MAX_VALUE);
+        TransportAddress[] addresses = TcpTransport.parse("127.0.0.1", "1234-1235", Integer.MAX_VALUE);
         assertEquals(2, addresses.length);
 
         assertEquals("127.0.0.1", addresses[0].getAddress());
         assertEquals(1234, addresses[0].getPort());
-
+
         assertEquals("127.0.0.1", addresses[1].getAddress());
         assertEquals(1235, addresses[1].getPort());
     }
 
     /** Test ipv4 host with port works */
     public void testParseV4WithPort() throws Exception {
-        TransportAddress[] addresses = NettyTransport.parse("127.0.0.1:2345", "1234", Integer.MAX_VALUE);
+        TransportAddress[] addresses = TcpTransport.parse("127.0.0.1:2345", "1234", Integer.MAX_VALUE);
         assertEquals(1, addresses.length);
 
         assertEquals("127.0.0.1", addresses[0].getAddress());
@@ -57,7 +57,7 @@ public class NettyTransportTests extends ESTestCase {
 
     /** Test ipv4 host with port range works */
     public void testParseV4WithPortRange() throws Exception {
-        TransportAddress[] addresses = NettyTransport.parse("127.0.0.1:2345-2346", "1234", Integer.MAX_VALUE);
+        TransportAddress[] addresses = TcpTransport.parse("127.0.0.1:2345-2346", "1234", Integer.MAX_VALUE);
         assertEquals(2, addresses.length);
 
         assertEquals("127.0.0.1", addresses[0].getAddress());
@@ -70,7 +70,7 @@ public class NettyTransportTests extends ESTestCase {
     /** Test unbracketed ipv6 hosts in configuration fail. Leave no ambiguity */
     public void testParseV6UnBracketed() throws Exception {
         try {
-            NettyTransport.parse("::1", "1234", Integer.MAX_VALUE);
+            TcpTransport.parse("::1", "1234", Integer.MAX_VALUE);
             fail("should have gotten exception");
         } catch (IllegalArgumentException expected) {
             assertTrue(expected.getMessage().contains("must be bracketed"));
@@ -79,7 +79,7 @@ public class NettyTransportTests extends ESTestCase {
 
     /** Test ipv6 host with a default port works */
     public void testParseV6DefaultPort() throws Exception {
-        TransportAddress[] addresses = NettyTransport.parse("[::1]", "1234", Integer.MAX_VALUE);
+        TransportAddress[] addresses = TcpTransport.parse("[::1]", "1234", Integer.MAX_VALUE);
         assertEquals(1, addresses.length);
 
         assertEquals("::1", addresses[0].getAddress());
@@ -88,19 +88,19 @@ public class NettyTransportTests extends ESTestCase {
 
     /** Test ipv6 host with a default port range works */
    public void testParseV6DefaultRange() throws Exception {
-        TransportAddress[] addresses = NettyTransport.parse("[::1]", "1234-1235", Integer.MAX_VALUE);
+        TransportAddress[] addresses = TcpTransport.parse("[::1]", "1234-1235", Integer.MAX_VALUE);
         assertEquals(2, addresses.length);
 
         assertEquals("::1", addresses[0].getAddress());
         assertEquals(1234, addresses[0].getPort());
-
+
         assertEquals("::1", addresses[1].getAddress());
         assertEquals(1235, addresses[1].getPort());
     }
 
     /** Test ipv6 host with port works */
     public void testParseV6WithPort() throws Exception {
-        TransportAddress[] addresses = NettyTransport.parse("[::1]:2345", "1234", Integer.MAX_VALUE);
+        TransportAddress[] addresses = TcpTransport.parse("[::1]:2345", "1234", Integer.MAX_VALUE);
         assertEquals(1, addresses.length);
 
         assertEquals("::1", addresses[0].getAddress());
@@ -109,7 +109,7 @@ public class NettyTransportTests extends ESTestCase {
 
     /** Test ipv6 host with port range works */
     public void testParseV6WithPortRange() throws Exception {
-        TransportAddress[] addresses = NettyTransport.parse("[::1]:2345-2346", "1234", Integer.MAX_VALUE);
+        TransportAddress[] addresses = TcpTransport.parse("[::1]:2345-2346", "1234", Integer.MAX_VALUE);
         assertEquals(2, addresses.length);
 
         assertEquals("::1", addresses[0].getAddress());
@@ -118,10 +118,10 @@ public class NettyTransportTests extends ESTestCase {
         assertEquals("::1", addresses[1].getAddress());
         assertEquals(2346, addresses[1].getPort());
     }
-
+
     /** Test per-address limit */
     public void testAddressLimit() throws Exception {
-        TransportAddress[] addresses = NettyTransport.parse("[::1]:100-200", "1000", 3);
+        TransportAddress[] addresses = TcpTransport.parse("[::1]:100-200", "1000", 3);
         assertEquals(3, addresses.length);
         assertEquals(100, addresses[0].getPort());
         assertEquals(101, addresses[1].getPort());
diff --git a/core/src/test/java/org/elasticsearch/transport/netty/ChannelBufferBytesReferenceTests.java b/core/src/test/java/org/elasticsearch/transport/netty/ChannelBufferBytesReferenceTests.java
new file mode 100644
index 00000000000..bc10458378f
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/transport/netty/ChannelBufferBytesReferenceTests.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.transport.netty;
+
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.bytes.AbstractBytesReferenceTestCase;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput;
+import org.jboss.netty.buffer.ChannelBuffer;
+import org.jboss.netty.buffer.ChannelBuffers;
+
+import java.io.IOException;
+
+public class ChannelBufferBytesReferenceTests extends AbstractBytesReferenceTestCase {
+    @Override
+    protected BytesReference newBytesReference(int length) throws IOException {
+        ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(length, bigarrays);
+        for (int i = 0; i < length; i++) {
+            out.writeByte((byte) random().nextInt(1 << 8));
+        }
+        assertEquals(out.size(), length);
+        BytesReference ref = out.bytes();
+        assertEquals(ref.length(), length);
+        BytesRef bytesRef = ref.toBytesRef();
+        final ChannelBuffer channelBuffer = ChannelBuffers.wrappedBuffer(bytesRef.bytes, bytesRef.offset, bytesRef.length);
+        return NettyUtils.toBytesReference(channelBuffer);
+    }
+
+    public void testSliceOnAdvancedBuffer() throws IOException {
+        BytesReference bytesReference = newBytesReference(randomIntBetween(10, 3 * PAGE_SIZE));
+        BytesRef bytesRef = bytesReference.toBytesRef();
+        ChannelBuffer channelBuffer = ChannelBuffers.wrappedBuffer(bytesRef.bytes, bytesRef.offset,
+            bytesRef.length);
+        int numBytesToRead = randomIntBetween(1, 5);
+        for (int i = 0; i < numBytesToRead; i++) {
+            channelBuffer.readByte();
+        }
+        BytesReference other = NettyUtils.toBytesReference(channelBuffer);
+        BytesReference slice = bytesReference.slice(numBytesToRead, bytesReference.length() - numBytesToRead);
+        assertEquals(other, slice);
+        assertEquals(other.slice(3, 1), slice.slice(3, 1));
+    }
+
+    public void testImmutable() throws IOException {
+        BytesReference bytesReference = newBytesReference(randomIntBetween(10, 3 * PAGE_SIZE));
+        BytesRef bytesRef = BytesRef.deepCopyOf(bytesReference.toBytesRef());
+        ChannelBuffer channelBuffer = ChannelBuffers.wrappedBuffer(bytesRef.bytes, bytesRef.offset,
+            bytesRef.length);
+        ChannelBufferBytesReference channelBufferBytesReference = new ChannelBufferBytesReference(channelBuffer, bytesRef.length);
+        assertEquals(channelBufferBytesReference, bytesReference);
+        channelBuffer.readInt(); // this advances the index of the channel buffer
+        assertEquals(channelBufferBytesReference, bytesReference);
+        assertEquals(bytesRef, channelBufferBytesReference.toBytesRef());
+
+        BytesRef unicodeBytes = new BytesRef(randomUnicodeOfCodepointLength(100));
+        channelBuffer = ChannelBuffers.wrappedBuffer(unicodeBytes.bytes, unicodeBytes.offset, unicodeBytes.length);
+        channelBufferBytesReference = new ChannelBufferBytesReference(channelBuffer, unicodeBytes.length);
+        String utf8ToString = channelBufferBytesReference.utf8ToString();
+        channelBuffer.readInt(); // this advances the index of the channel buffer
+        assertEquals(utf8ToString, channelBufferBytesReference.utf8ToString());
+    }
+}
diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java
index df7dcb0714b..c69f56c2cbd 100644
--- a/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java
+++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java
@@ -19,7 +19,6 @@
 package org.elasticsearch.transport.netty;
 
 import org.elasticsearch.Version;
-import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.lease.Releasables;
@@ -33,6 +32,7 @@ import org.elasticsearch.test.transport.MockTransportService;
 import org.elasticsearch.threadpool.TestThreadPool;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.BaseTransportResponseHandler;
+import org.elasticsearch.transport.TcpTransport;
 import org.elasticsearch.transport.TransportChannel;
 import org.elasticsearch.transport.TransportException;
 import org.elasticsearch.transport.TransportRequest;
@@ -56,7 +56,7 @@ public class NettyScheduledPingTests extends ESTestCase {
         ThreadPool threadPool = new TestThreadPool(getClass().getName());
 
         Settings settings = Settings.builder()
-            .put(NettyTransport.PING_SCHEDULE.getKey(), "5ms")
+            .put(TcpTransport.PING_SCHEDULE.getKey(), "5ms")
             .put(TransportSettings.PORT.getKey(), 0)
             .put("cluster.name", "test")
             .build();
@@ -89,12 +89,12 @@ public class NettyScheduledPingTests extends ESTestCase {
         assertBusy(new Runnable() {
             @Override
             public void run() {
-                assertThat(nettyA.scheduledPing.successfulPings.count(), greaterThan(100L));
-                assertThat(nettyB.scheduledPing.successfulPings.count(), greaterThan(100L));
+                assertThat(nettyA.getPing().getSuccessfulPings(), greaterThan(100L));
+                assertThat(nettyB.getPing().getSuccessfulPings(), greaterThan(100L));
             }
         });
-        assertThat(nettyA.scheduledPing.failedPings.count(), equalTo(0L));
-        assertThat(nettyB.scheduledPing.failedPings.count(), equalTo(0L));
+        assertThat(nettyA.getPing().getFailedPings(), equalTo(0L));
+        assertThat(nettyB.getPing().getFailedPings(), equalTo(0L));
 
         serviceA.registerRequestHandler("sayHello", TransportRequest.Empty::new, ThreadPool.Names.GENERIC,
                 new TransportRequestHandler<TransportRequest.Empty>() {
@@ -137,15 +137,12 @@ public class NettyScheduledPingTests extends ESTestCase {
             }).txGet();
         }
 
-        assertBusy(new Runnable() {
-            @Override
-            public void run() {
-                assertThat(nettyA.scheduledPing.successfulPings.count(), greaterThan(200L));
-                assertThat(nettyB.scheduledPing.successfulPings.count(), greaterThan(200L));
-            }
+        assertBusy(() -> {
+            assertThat(nettyA.getPing().getSuccessfulPings(), greaterThan(200L));
+            assertThat(nettyB.getPing().getSuccessfulPings(), greaterThan(200L));
         });
-        assertThat(nettyA.scheduledPing.failedPings.count(), equalTo(0L));
-        assertThat(nettyB.scheduledPing.failedPings.count(), equalTo(0L));
+        assertThat(nettyA.getPing().getFailedPings(), equalTo(0L));
+        assertThat(nettyB.getPing().getFailedPings(), equalTo(0L));
 
         Releasables.close(serviceA, serviceB);
         terminate(threadPool);
diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java
index 2f89435c6df..310f804ef7c 100644
--- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java
+++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java
@@ -44,6 +44,7 @@ import org.jboss.netty.channel.ChannelPipeline;
 import org.jboss.netty.channel.ChannelPipelineFactory;
 
 import java.io.IOException;
+import java.net.InetSocketAddress;
 import java.util.Collection;
 import java.util.Collections;
 
@@ -98,45 +99,24 @@ public class NettyTransportIT extends ESIntegTestCase {
             super(settings, threadPool, networkService, bigArrays, namedWriteableRegistry, circuitBreakerService);
         }
 
+        protected String handleRequest(Channel channel, String profileName,
+                                       StreamInput stream, long requestId, int messageLengthBytes, Version version,
+                                       InetSocketAddress remoteAddress) throws IOException {
+            String action = super.handleRequest(channel, profileName, stream, requestId, messageLengthBytes, version,
+                remoteAddress);
+            channelProfileName = TransportSettings.DEFAULT_PROFILE;
+            return action;
+        }
+
         @Override
-        public ChannelPipelineFactory configureServerChannelPipelineFactory(String name, Settings groupSettings) {
-            return new ErrorPipelineFactory(this, name, groupSettings);
-        }
-
-        private static class ErrorPipelineFactory extends ServerChannelPipelineFactory {
-
-            private final ESLogger logger;
-
-            public ErrorPipelineFactory(ExceptionThrowingNettyTransport nettyTransport, String name, Settings groupSettings) {
-                super(nettyTransport, name, groupSettings);
-                this.logger = nettyTransport.logger;
-            }
-
-            @Override
-            public ChannelPipeline getPipeline() throws Exception {
-                ChannelPipeline pipeline = super.getPipeline();
-                pipeline.replace("dispatcher", "dispatcher",
-                    new MessageChannelHandler(nettyTransport, logger, TransportSettings.DEFAULT_PROFILE) {
-
-                        @Override
-                        protected String handleRequest(Channel channel, Marker marker, StreamInput buffer, long requestId,
-                                                       int messageLengthBytes, Version version) throws IOException {
-                            String action = super.handleRequest(channel, marker, buffer, requestId, messageLengthBytes, version);
-                            channelProfileName = this.profileName;
-                            return action;
-                        }
-
-                        @Override
-                        protected void validateRequest(Marker marker, StreamInput buffer, long requestId, String action) throws IOException {
-                            super.validateRequest(marker, buffer, requestId, action);
-                            String error = threadPool.getThreadContext().getHeader("ERROR");
-                            if (error != null) {
-                                throw new ElasticsearchException(error);
-                            }
-                        }
-                    });
-                return pipeline;
+        protected void validateRequest(StreamInput buffer, long requestId, String action)
+            throws IOException {
+            super.validateRequest(buffer, requestId, action);
+            String error = threadPool.getThreadContext().getHeader("ERROR");
+            if (error != null) {
+                throw new ElasticsearchException(error);
             }
         }
+    }
 }
diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortTests.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortTests.java
index 6fdc214d18d..352c90d2317 100644
--- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortTests.java
+++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortTests.java
@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.transport.netty;
 
-import org.elasticsearch.Version;
 import org.elasticsearch.common.component.Lifecycle;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.network.NetworkService;
@@ -30,6 +29,7 @@ import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.TestThreadPool;
 import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.TcpTransport;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.transport.TransportSettings;
 import org.junit.Before;
@@ -58,7 +58,7 @@ public class NettyTransportMultiPortTests extends ESTestCase {
             .build();
 
         ThreadPool threadPool = new TestThreadPool("tst");
-        try (NettyTransport transport = startNettyTransport(settings, threadPool)) {
+        try (TcpTransport transport = startTransport(settings, threadPool)) {
             assertEquals(1, transport.profileBoundAddresses().size());
             assertEquals(1, transport.boundAddress().boundAddresses().length);
         } finally {
@@ -74,7 +74,7 @@ public class NettyTransportMultiPortTests extends ESTestCase {
             .build();
 
         ThreadPool threadPool = new TestThreadPool("tst");
-        try (NettyTransport transport = startNettyTransport(settings, threadPool)) {
+        try (TcpTransport transport = startTransport(settings, threadPool)) {
             assertEquals(1, transport.profileBoundAddresses().size());
             assertEquals(1, transport.boundAddress().boundAddresses().length);
         } finally {
@@ -91,7 +91,7 @@ public class NettyTransportMultiPortTests extends ESTestCase {
             .build();
 
         ThreadPool threadPool = new TestThreadPool("tst");
-        try (NettyTransport transport = startNettyTransport(settings, threadPool)) {
+        try (TcpTransport transport = startTransport(settings, threadPool)) {
             assertEquals(0, transport.profileBoundAddresses().size());
             assertEquals(1, transport.boundAddress().boundAddresses().length);
         } finally {
@@ -107,7 +107,7 @@ public class NettyTransportMultiPortTests extends ESTestCase {
             .build();
 
         ThreadPool threadPool = new TestThreadPool("tst");
-        try (NettyTransport transport = startNettyTransport(settings, threadPool)) {
+        try (TcpTransport transport = startTransport(settings, threadPool)) {
             assertEquals(0, transport.profileBoundAddresses().size());
             assertEquals(1, transport.boundAddress().boundAddresses().length);
         } finally {
@@ -125,7 +125,7 @@ public class NettyTransportMultiPortTests extends ESTestCase {
             .build();
 
         ThreadPool threadPool = new TestThreadPool("tst");
-        try (NettyTransport transport = startNettyTransport(settings, threadPool)) {
+        try (TcpTransport transport = startTransport(settings, threadPool)) {
             assertEquals(0, transport.profileBoundAddresses().size());
             assertEquals(1, transport.boundAddress().boundAddresses().length);
         } finally {
@@ -133,14 +133,13 @@ public class NettyTransportMultiPortTests extends ESTestCase {
         }
     }
 
-    private NettyTransport startNettyTransport(Settings settings, ThreadPool threadPool) {
+    private TcpTransport startTransport(Settings settings, ThreadPool threadPool) {
         BigArrays bigArrays = new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService());
-
-        NettyTransport nettyTransport = new NettyTransport(settings, threadPool, new NetworkService(settings), bigArrays,
+        TcpTransport transport = new NettyTransport(settings, threadPool, new NetworkService(settings), bigArrays,
             new NamedWriteableRegistry(), new NoneCircuitBreakerService());
-        nettyTransport.start();
+        transport.start();
 
-        assertThat(nettyTransport.lifecycleState(), is(Lifecycle.State.STARTED));
-        return nettyTransport;
+        assertThat(transport.lifecycleState(), is(Lifecycle.State.STARTED));
+        return transport;
     }
 }
diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyUtilsTests.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyUtilsTests.java
new file mode 100644
index 00000000000..aa1db4a44c7
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyUtilsTests.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.transport.netty;
+
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.bytes.AbstractBytesReferenceTestCase;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput;
+import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
+import org.elasticsearch.test.ESTestCase;
+import org.jboss.netty.buffer.ChannelBuffer;
+import org.jboss.netty.buffer.ChannelBuffers;
+import org.jboss.netty.buffer.CompositeChannelBuffer;
+
+import java.io.IOException;
+
+public class NettyUtilsTests extends ESTestCase {
+
+    private static final int PAGE_SIZE = BigArrays.BYTE_PAGE_SIZE;
+    private final BigArrays bigarrays = new BigArrays(null, new NoneCircuitBreakerService(), false);
+
+    public void testToChannelBufferWithEmptyRef() throws IOException {
+        ChannelBuffer channelBuffer = NettyUtils.toChannelBuffer(getRandomizedBytesReference(0));
+        assertSame(ChannelBuffers.EMPTY_BUFFER, channelBuffer);
+    }
+
+    public void testToChannelBufferWithSlice() throws IOException {
+        BytesReference ref = getRandomizedBytesReference(randomIntBetween(1, 3 * PAGE_SIZE));
+        int sliceOffset = randomIntBetween(0, ref.length());
+        int sliceLength = randomIntBetween(ref.length() - sliceOffset, ref.length() - sliceOffset);
+        BytesReference slice = ref.slice(sliceOffset, sliceLength);
+        ChannelBuffer channelBuffer = NettyUtils.toChannelBuffer(slice);
+        BytesReference bytesReference = NettyUtils.toBytesReference(channelBuffer);
+        assertArrayEquals(BytesReference.toBytes(slice), BytesReference.toBytes(bytesReference));
+    }
+
+    public void testToChannelBufferWithSliceAfter() throws IOException {
+        BytesReference ref = getRandomizedBytesReference(randomIntBetween(1, 3 * PAGE_SIZE));
+        int sliceOffset = randomIntBetween(0, ref.length());
+        int sliceLength = randomIntBetween(ref.length() - sliceOffset, ref.length() - sliceOffset);
+        ChannelBuffer channelBuffer = NettyUtils.toChannelBuffer(ref);
+        BytesReference bytesReference = NettyUtils.toBytesReference(channelBuffer);
+        assertArrayEquals(BytesReference.toBytes(ref.slice(sliceOffset, sliceLength)),
+            BytesReference.toBytes(bytesReference.slice(sliceOffset, sliceLength)));
+    }
+
+    public void testToChannelBuffer() throws IOException {
+        BytesReference ref = getRandomizedBytesReference(randomIntBetween(1, 3 * PAGE_SIZE));
+        ChannelBuffer channelBuffer = NettyUtils.toChannelBuffer(ref);
+        BytesReference bytesReference = NettyUtils.toBytesReference(channelBuffer);
+        if (ref instanceof ChannelBufferBytesReference) {
+            assertEquals(channelBuffer, ((ChannelBufferBytesReference) ref).toChannelBuffer());
+        } else if (AbstractBytesReferenceTestCase.getNumPages(ref) > 1) { // we gather the buffers into a channel buffer
+            assertTrue(channelBuffer instanceof CompositeChannelBuffer);
+        }
+        assertArrayEquals(BytesReference.toBytes(ref), BytesReference.toBytes(bytesReference));
+    }
+
+    private BytesReference getRandomizedBytesReference(int length) throws IOException {
+        // we know bytes stream output always creates a paged bytes reference, we use it to create randomized content
+        ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(length, bigarrays);
+        for (int i = 0; i < length; i++) {
+            out.writeByte((byte) random().nextInt(1 << 8));
+        }
+        assertEquals(out.size(), length);
+        BytesReference ref = out.bytes();
+        assertEquals(ref.length(), length);
+        if (randomBoolean()) {
+            return new BytesArray(ref.toBytesRef());
+        } else if (randomBoolean()) {
+            BytesRef bytesRef = ref.toBytesRef();
+            return NettyUtils.toBytesReference(ChannelBuffers.wrappedBuffer(bytesRef.bytes, bytesRef.offset,
+                bytesRef.length));
+        } else {
+            return ref;
+        }
+    }
+
+}
diff --git a/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java b/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java
index 2bbedd8784b..f361496c537 100644
--- a/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java
@@ -27,7 +27,7 @@ public class TribeServiceTests extends ESTestCase {
         Settings globalSettings = Settings.builder()
             .put("node.name", "nodename")
             .put("path.home", "some/path").build();
-        Settings clientSettings = TribeService.buildClientSettings("tribe1", globalSettings, Settings.EMPTY);
+        Settings clientSettings = TribeService.buildClientSettings("tribe1", "parent_id", globalSettings, Settings.EMPTY);
         assertEquals("some/path", clientSettings.get("path.home"));
         assertEquals("nodename/tribe1", clientSettings.get("node.name"));
         assertEquals("tribe1", clientSettings.get("tribe.name"));
@@ -35,7 +35,9 @@ public class TribeServiceTests extends ESTestCase {
         assertEquals("false", clientSettings.get("node.master"));
         assertEquals("false", clientSettings.get("node.data"));
         assertEquals("false", clientSettings.get("node.ingest"));
-        assertEquals(7, clientSettings.getAsMap().size());
+        assertEquals("false", clientSettings.get("node.local_storage"));
+        assertEquals("3707202549613653169", clientSettings.get("node.id.seed")); // should be fixed by the parent id and tribe name
+        assertEquals(9, clientSettings.getAsMap().size());
     }
 
     public void testEnvironmentSettings() {
@@ -45,7 +47,7 @@ public class TribeServiceTests extends ESTestCase {
             .put("path.conf", "conf/path")
             .put("path.scripts", "scripts/path")
             .put("path.logs", "logs/path").build();
-        Settings clientSettings = TribeService.buildClientSettings("tribe1", globalSettings, Settings.EMPTY);
+        Settings clientSettings = TribeService.buildClientSettings("tribe1", "parent_id", globalSettings, Settings.EMPTY);
         assertEquals("some/path", clientSettings.get("path.home"));
         assertEquals("conf/path", clientSettings.get("path.conf"));
         assertEquals("scripts/path", clientSettings.get("path.scripts"));
@@ -54,7 +56,7 @@ public class TribeServiceTests extends ESTestCase {
         Settings tribeSettings = Settings.builder()
             .put("path.home", "alternate/path").build();
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
-            TribeService.buildClientSettings("tribe1", globalSettings, tribeSettings);
+            TribeService.buildClientSettings("tribe1", "parent_id", globalSettings, tribeSettings);
         });
         assertTrue(e.getMessage(), e.getMessage().contains("Setting [path.home] not allowed in tribe client"));
     }
@@ -69,7 +71,7 @@ public class TribeServiceTests extends ESTestCase {
             .put("transport.host", "3.3.3.3")
             .put("transport.bind_host", "4.4.4.4")
             .put("transport.publish_host", "5.5.5.5").build();
-        Settings clientSettings = TribeService.buildClientSettings("tribe1", globalSettings, Settings.EMPTY);
+        Settings clientSettings = TribeService.buildClientSettings("tribe1", "parent_id", globalSettings, Settings.EMPTY);
         assertEquals("0.0.0.0", clientSettings.get("network.host"));
         assertEquals("1.1.1.1", clientSettings.get("network.bind_host"));
         assertEquals("2.2.2.2", clientSettings.get("network.publish_host"));
@@ -85,7 +87,7 @@ public class TribeServiceTests extends ESTestCase {
             .put("transport.host", "6.6.6.6")
             .put("transport.bind_host", "7.7.7.7")
             .put("transport.publish_host", "8.8.8.8").build();
-        clientSettings = TribeService.buildClientSettings("tribe1", globalSettings, tribeSettings);
+        clientSettings = TribeService.buildClientSettings("tribe1", "parent_id", globalSettings, tribeSettings);
         assertEquals("3.3.3.3", clientSettings.get("network.host"));
         assertEquals("4.4.4.4", clientSettings.get("network.bind_host"));
         assertEquals("5.5.5.5", clientSettings.get("network.publish_host"));
diff --git a/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java b/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java
index 9e08ecde6fa..583d8a0288d 100644
--- a/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java
+++ b/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java
@@ -59,7 +59,7 @@ import static org.hamcrest.Matchers.nullValue;
 @ClusterScope(scope= Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 1)
 public class SimpleTTLIT extends ESIntegTestCase {
 
-    static private final long PURGE_INTERVAL = 200;
+    private static final long PURGE_INTERVAL = 200;
 
     @Override
     protected int numberOfShards() {
diff --git a/core/src/test/java/org/elasticsearch/update/UpdateIT.java b/core/src/test/java/org/elasticsearch/update/UpdateIT.java
index 55834c181b0..c8cacbc36c3 100644
--- a/core/src/test/java/org/elasticsearch/update/UpdateIT.java
+++ b/core/src/test/java/org/elasticsearch/update/UpdateIT.java
@@ -65,11 +65,8 @@ import java.util.concurrent.TimeUnit;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows;
-import static org.hamcrest.Matchers.allOf;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.greaterThanOrEqualTo;
-import static org.hamcrest.Matchers.lessThanOrEqualTo;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
 
@@ -800,7 +797,7 @@ public class UpdateIT extends ESIntegTestCase {
         final CountDownLatch latch = new CountDownLatch(numberOfThreads);
         final CountDownLatch startLatch = new CountDownLatch(1);
         final int numberOfUpdatesPerThread = scaledRandomIntBetween(100, 500);
-        final List<Throwable> failures = new CopyOnWriteArrayList<>();
+        final List<Exception> failures = new CopyOnWriteArrayList<>();
 
         for (int i = 0; i < numberOfThreads; i++) {
             Runnable r = new Runnable() {
@@ -832,7 +829,7 @@ public class UpdateIT extends ESIntegTestCase {
                         logger.warn("Test was forcefully stopped. Client [{}] may still have outstanding requests.", Thread.currentThread().getName());
                         failures.add(e);
                         Thread.currentThread().interrupt();
-                    } catch (Throwable e) {
+                    } catch (Exception e) {
                         failures.add(e);
                     } finally {
                         latch.countDown();
@@ -900,7 +897,7 @@ public class UpdateIT extends ESIntegTestCase {
                     }
 
                     @Override
-                    public void onFailure(Throwable e) {
+                    public void onFailure(Exception e) {
                         synchronized (failedMap) {
                             incrementMapValue(id, failedMap);
                         }
@@ -922,7 +919,7 @@ public class UpdateIT extends ESIntegTestCase {
                     }
 
                     @Override
-                    public void onFailure(Throwable e) {
+                    public void onFailure(Exception e) {
                         synchronized (failedMap) {
                             incrementMapValue(id, failedMap);
                         }
@@ -976,7 +973,7 @@ public class UpdateIT extends ESIntegTestCase {
                         }
                     }
                 }
-            } catch (Throwable e) {
+            } catch (Exception e) {
                 logger.error("Something went wrong", e);
                 failures.add(e);
             } finally {
diff --git a/core/src/test/java/org/elasticsearch/update/UpdateNoopIT.java b/core/src/test/java/org/elasticsearch/update/UpdateNoopIT.java
index b4b5eefc832..f14d91465f6 100644
--- a/core/src/test/java/org/elasticsearch/update/UpdateNoopIT.java
+++ b/core/src/test/java/org/elasticsearch/update/UpdateNoopIT.java
@@ -240,12 +240,12 @@ public class UpdateNoopIT extends ESIntegTestCase {
 
     private void updateAndCheckSource(long expectedVersion, Boolean detectNoop, XContentBuilder xContentBuilder) {
         UpdateResponse updateResponse = update(detectNoop, expectedVersion, xContentBuilder);
-        assertEquals(updateResponse.getGetResult().sourceRef().toUtf8(), xContentBuilder.bytes().toUtf8());
+        assertEquals(updateResponse.getGetResult().sourceRef().utf8ToString(), xContentBuilder.bytes().utf8ToString());
     }
 
     private UpdateResponse update(Boolean detectNoop, long expectedVersion, XContentBuilder xContentBuilder) {
         UpdateRequestBuilder updateRequest = client().prepareUpdate("test", "type1", "1")
-                .setDoc(xContentBuilder.bytes().toUtf8())
+                .setDoc(xContentBuilder.bytes().utf8ToString())
                 .setDocAsUpsert(true)
                 .setFields("_source");
         if (detectNoop != null) {
diff --git a/core/src/test/java/org/elasticsearch/versioning/ConcurrentDocumentOperationIT.java b/core/src/test/java/org/elasticsearch/versioning/ConcurrentDocumentOperationIT.java
index b2cc794ac6c..e2c572f783a 100644
--- a/core/src/test/java/org/elasticsearch/versioning/ConcurrentDocumentOperationIT.java
+++ b/core/src/test/java/org/elasticsearch/versioning/ConcurrentDocumentOperationIT.java
@@ -53,7 +53,7 @@ public class ConcurrentDocumentOperationIT extends ESIntegTestCase {
             }
 
             @Override
-            public void onFailure(Throwable e) {
+            public void onFailure(Exception e) {
                 logger.error("Unexpected exception while indexing", e);
                 failure.set(e);
                 latch.countDown();
diff --git a/core/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java b/core/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java
index 2356395219f..74b910bf2b4 100644
--- a/core/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java
+++ b/core/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java
@@ -220,7 +220,7 @@ public class SimpleVersioningIT extends ESIntegTestCase {
             fail("did not hit expected exception");
         } catch (IllegalArgumentException iae) {
             // expected
            assertTrue(iae.getMessage().contains("Failed to parse setting
[index.gc_deletes] with value [42] as a time value: unit is missing or unrecognized")); + assertTrue(iae.getMessage().contains("failed to parse setting [index.gc_deletes] with value [42] as a time value: unit is missing or unrecognized")); } } diff --git a/dev-tools/smoke_test_rc.py b/dev-tools/smoke_test_rc.py index 259081ddc10..33abbf96345 100644 --- a/dev-tools/smoke_test_rc.py +++ b/dev-tools/smoke_test_rc.py @@ -63,7 +63,7 @@ DEFAULT_PLUGINS = ["analysis-icu", "analysis-phonetic", "analysis-smartcn", "analysis-stempel", - "discovery-azure", + "discovery-azure-classic", "discovery-ec2", "discovery-gce", "ingest-attachment", diff --git a/distribution/licenses/netty-3.10.5.Final.jar.sha1 b/distribution/licenses/netty-3.10.5.Final.jar.sha1 deleted file mode 100644 index 6f190752e9e..00000000000 --- a/distribution/licenses/netty-3.10.5.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9ca7d55d246092bddd29b867706e2f6c7db701a0 diff --git a/distribution/licenses/netty-3.10.6.Final.jar.sha1 b/distribution/licenses/netty-3.10.6.Final.jar.sha1 new file mode 100644 index 00000000000..35872846658 --- /dev/null +++ b/distribution/licenses/netty-3.10.6.Final.jar.sha1 @@ -0,0 +1 @@ +18ed04a0e502896552854926e908509db2987a00 \ No newline at end of file diff --git a/docs/build.gradle b/docs/build.gradle index 660755a1c65..41e3e352b8d 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -30,6 +30,8 @@ integTest { configFile 'scripts/my_script.py' configFile 'userdict_ja.txt' configFile 'KeywordTokenizer.rbbi' + // Whitelist reindexing from the local node so we can test it. + setting 'reindex.remote.whitelist', 'myself' } } @@ -81,3 +83,15 @@ Closure setupTwitter = { String name, int count -> } setupTwitter('twitter', 5) setupTwitter('big_twitter', 120) + +buildRestTests.setups['host'] = ''' + # Fetch the http host. We use the host of the master because we know there will always be a master. + - do: + cluster.state: {} + - set: { master_node: master } + - do: + nodes.info: + metric: [ http ] + - is_true: nodes.$master.http.publish_address + - set: {nodes.$master.http.publish_address: host} +''' diff --git a/docs/java-api/admin/cluster/health.asciidoc b/docs/java-api/admin/cluster/health.asciidoc index 7d20fdde6a3..615a011cf72 100644 --- a/docs/java-api/admin/cluster/health.asciidoc +++ b/docs/java-api/admin/cluster/health.asciidoc @@ -14,7 +14,7 @@ String clusterName = healths.getClusterName(); <2> int numberOfDataNodes = healths.getNumberOfDataNodes(); <3> int numberOfNodes = healths.getNumberOfNodes(); <4> -for (ClusterIndexHealth health : healths) { <5> +for (ClusterIndexHealth health : healths.getIndices().values()) { <5> String index = health.getIndex(); <6> int numberOfShards = health.getNumberOfShards(); <7> int numberOfReplicas = health.getNumberOfReplicas(); <8> diff --git a/docs/java-api/docs/update-by-query.asciidoc b/docs/java-api/docs/update-by-query.asciidoc new file mode 100644 index 00000000000..a94899668ef --- /dev/null +++ b/docs/java-api/docs/update-by-query.asciidoc @@ -0,0 +1,225 @@ +[[docs-update-by-query]] +== Update By Query API + +experimental[The update-by-query API is new and should still be considered experimental. The API may change in ways that are not backwards compatible] + +The simplest usage of `updateByQuery` updates each +document in an index without changing the source. This usage enables +<> or another online +mapping change. 
+
+[source,java]
+--------------------------------------------------
+UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client);
+
+updateByQuery.source("source_index").abortOnVersionConflict(false);
+
+BulkIndexByScrollResponse response = updateByQuery.get();
+--------------------------------------------------
+
+Calls to the `updateByQuery` API start by taking a snapshot of the index and then
+indexing any documents found, using `internal` versioning.
+
+NOTE: Version conflicts happen when a document changes between the time of the
+snapshot and the time the index request is processed.
+
+When the versions match, `updateByQuery` updates the document
+and increments the version number.
+
+All update and query failures cause `updateByQuery` to abort. These failures are
+available from the `BulkIndexByScrollResponse#getIndexingFailures` method. Any
+successful updates remain and are not rolled back. While the first failure
+causes the abort, the response contains all of the failures generated by the
+failed bulk request.
+
+To prevent version conflicts from causing `updateByQuery` to abort, set
+`abortOnVersionConflict(false)`. The first example does this because it is
+trying to pick up an online mapping change and a version conflict means that
+the conflicting document was updated between the start of the `updateByQuery`
+and the time when it attempted to update the document. This is fine because
+that update will have picked up the online mapping update.
+
+The `UpdateByQueryRequestBuilder` API supports filtering the updated documents,
+limiting the total number of documents to update, and updating documents
+with a script:
+
+[source,java]
+--------------------------------------------------
+UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client);
+
+updateByQuery.source("source_index")
+    .filter(termQuery("level", "awesome"))
+    .size(1000)
+    .script(new Script("ctx._source.awesome = 'absolutely'", ScriptType.INLINE, "painless", emptyMap()));
+
+BulkIndexByScrollResponse response = updateByQuery.get();
+--------------------------------------------------
+
+`UpdateByQueryRequestBuilder` also enables direct access to the query used
+to select the documents. You can use this access to change the default scroll size or
+otherwise modify the request for matching documents.
+
+[source,java]
+--------------------------------------------------
+UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client);
+
+updateByQuery.source("source_index")
+    .source().setSize(500);
+
+BulkIndexByScrollResponse response = updateByQuery.get();
+--------------------------------------------------
+
+You can also combine `size` with sorting to limit the documents updated:
+
+[source,java]
+--------------------------------------------------
+UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client);
+
+updateByQuery.source("source_index").size(100)
+    .source().addSort("cat", SortOrder.DESC);
+
+BulkIndexByScrollResponse response = updateByQuery.get();
+--------------------------------------------------
+
+In addition to changing the `_source` field for the document, you can use a
+script to change the action, similar to the Update API:
+
+[source,java]
+--------------------------------------------------
+UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client);
+
+updateByQuery.source("source_index")
+    .script(new Script(
+        "if (ctx._source.awesome == 'absolutely') {"
+        + "  ctx.op='noop'"
+        + "} else if (ctx._source.awesome == 'lame') {"
+        + "  ctx.op='delete'"
+        + "} else {"
+        + "ctx._source.awesome = 'absolutely'}", ScriptType.INLINE, "painless", emptyMap()));
+
+BulkIndexByScrollResponse response = updateByQuery.get();
+--------------------------------------------------
+
+As in the <>, you can set the value of `ctx.op` to change the
+operation that executes:
+
+`noop`::
+
+Set `ctx.op = "noop"` if your script doesn't make any
+changes. The `updateByQuery` operation then omits that document from the updates.
+This behavior increments the `noop` counter in the
+<>.
+
+`delete`::
+
+Set `ctx.op = "delete"` if your script decides that the document must be
+deleted. The deletion will be reported in the `deleted` counter in the
+<>.
+
+Setting `ctx.op` to any other value generates an error. Setting any
+other field in `ctx` generates an error.
+
+This API doesn't allow you to move the documents it touches, just modify their
+source. This is intentional! We've made no provisions for removing the document
+from its original location.
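+
+The outcome of each operation is tallied on the response. A minimal sketch of
+checking those tallies (the `getUpdated`, `getNoops`, and `getDeleted` accessor
+names are assumptions for illustration; only `getIndexingFailures` is documented
+above):
+
+[source,java]
+--------------------------------------------------
+BulkIndexByScrollResponse response = updateByQuery.get();
+
+long updated = response.getUpdated();   // documents rewritten in place
+long noops = response.getNoops();       // documents skipped via ctx.op = "noop"
+long deleted = response.getDeleted();   // documents removed via ctx.op = "delete"
+if (response.getIndexingFailures().isEmpty() == false) {
+    // the request aborted early; successful updates are not rolled back
+}
+--------------------------------------------------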
+ +You can also perform these operations on multiple indices and types at once, similar to the search API: + +[source,java] +-------------------------------------------------- +UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); + +updateByQuery.source("foo", "bar").source().setTypes("a", "b"); + +BulkIndexByScrollResponse response = updateByQuery.get(); +-------------------------------------------------- + +If you provide a `routing` value then the process copies the routing value to the scroll query, +limiting the process to the shards that match that routing value: + +[source,java] +-------------------------------------------------- +UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); + +updateByQuery.source().setRouting("cat"); + +BulkIndexByScrollResponse response = updateByQuery.get(); +-------------------------------------------------- + +`updateByQuery` can also use the <> feature by +specifying a `pipeline` like this: + +[source,java] +-------------------------------------------------- +UpdateByQueryRequestBuilder updateByQuery = UpdateByQueryAction.INSTANCE.newRequestBuilder(client); + +updateByQuery.setPipeline("hurray"); + +BulkIndexByScrollResponse response = updateByQuery.get(); +-------------------------------------------------- + +[float] +[[docs-update-by-query-task-api]] +=== Works with the Task API + +You can fetch the status of all running update-by-query requests with the +<>: + +[source,java] +-------------------------------------------------- +ListTasksResponse tasksList = client.admin().cluster().prepareListTasks() + .setActions(UpdateByQueryAction.NAME).setDetailed(true).get(); + +for (TaskInfo info: tasksList.getTasks()) { + TaskId taskId = info.getTaskId(); + BulkByScrollTask.Status status = (BulkByScrollTask.Status) info.getStatus(); + // do stuff +} + +-------------------------------------------------- + +With the `TaskId` shown above you can look up the task directly: + +// provide API Example +[source,java] +-------------------------------------------------- +GetTaskResponse get = client.admin().cluster().prepareGetTask(taskId).get(); +-------------------------------------------------- + +[float] +[[docs-update-by-query-cancel-task-api]] +=== Works with the Cancel Task API + +Any Update By Query can be canceled using the <>: + +[source,java] +-------------------------------------------------- +// Cancel all update-by-query requests +client.admin().cluster().prepareCancelTasks().setActions(UpdateByQueryAction.NAME).get().getTasks() +// Cancel a specific update-by-query request +client.admin().cluster().prepareCancelTasks().setTaskId(taskId).get().getTasks() +-------------------------------------------------- + +Use the `list tasks` API to find the value of `taskId`. + +Cancelling a request is typically a very fast process but can take up to a few seconds. +The task status API continues to list the task until the cancellation is complete. + +[float] +[[docs-update-by-query-rethrottle]] +=== Rethrottling + +Use the `_rethrottle` API to change the value of `requests_per_second` on a running update: + +[source,java] +-------------------------------------------------- +RethrottleAction.INSTANCE.newRequestBuilder(client).setTaskId(taskId).setRequestsPerSecond(2.0f).get(); +-------------------------------------------------- + +Use the `list tasks` API to find the value of `taskId`. 
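+
+Putting the pieces together, a minimal sketch that slows every running
+update-by-query down to one request per second, using only the task-list and
+rethrottle calls shown above:
+
+[source,java]
+--------------------------------------------------
+ListTasksResponse tasksList = client.admin().cluster().prepareListTasks()
+    .setActions(UpdateByQueryAction.NAME).setDetailed(true).get();
+
+for (TaskInfo info : tasksList.getTasks()) {
+    // rethrottle each matching task individually
+    RethrottleAction.INSTANCE.newRequestBuilder(client)
+        .setTaskId(info.getTaskId())
+        .setRequestsPerSecond(1.0f)
+        .get();
+}
+--------------------------------------------------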
+ +As with the `updateByQuery` API, the value of `requests_per_second` +can be any positive float value to set the level of the throttle, or `Float.POSITIVE_INFINITY` to disable throttling. +A value of `requests_per_second` that speeds up the process takes +effect immediately. `requests_per_second` values that slow the query take effect +after completing the current batch in order to prevent scroll timeouts. diff --git a/docs/java-api/indexed-scripts.asciidoc b/docs/java-api/indexed-scripts.asciidoc index 45d19ae662d..a1259649a77 100644 --- a/docs/java-api/indexed-scripts.asciidoc +++ b/docs/java-api/indexed-scripts.asciidoc @@ -8,20 +8,20 @@ and delete indexed scripts and templates. [source,java] -------------------------------------------------- PutIndexedScriptResponse = client.preparePutIndexedScript() - .setScriptLang("groovy") + .setScriptLang("painless") .setId("script1") .setSource("script", "_score * doc['my_numeric_field'].value") .execute() .actionGet(); GetIndexedScriptResponse = client.prepareGetIndexedScript() - .setScriptLang("groovy") + .setScriptLang("painless") .setId("script1") .execute() .actionGet(); DeleteIndexedScriptResponse = client.prepareDeleteIndexedScript() - .setScriptLang("groovy") + .setScriptLang("painless") .setId("script1") .execute() .actionGet(); @@ -33,4 +33,4 @@ To store templates simply use "mustache" for the scriptLang. The API allows one to set the language of the indexed script being interacted with. If one is not provided the default scripting language -will be used. \ No newline at end of file +will be used. diff --git a/docs/java-api/query-dsl/script-query.asciidoc b/docs/java-api/query-dsl/script-query.asciidoc index 534c803ab08..33786b693d2 100644 --- a/docs/java-api/query-dsl/script-query.asciidoc +++ b/docs/java-api/query-dsl/script-query.asciidoc @@ -12,11 +12,11 @@ QueryBuilder qb = scriptQuery( <1> inlined script -If you have stored on each data node a script named `mygroovyscript.groovy` with: +If you have stored on each data node a script named `myscript.painless` with: -[source,groovy] +[source,js] -------------------------------------------------- -doc['num1'].value > param1 +doc['num1'].value > params.param1 -------------------------------------------------- You can use it then with: @@ -25,9 +25,9 @@ You can use it then with: -------------------------------------------------- QueryBuilder qb = scriptQuery( new Script( - "mygroovyscript", <1> + "myscript", <1> ScriptType.FILE, <2> - "groovy", <3> + "painless", <3> ImmutableMap.of("param1", 5)) <4> ); -------------------------------------------------- diff --git a/docs/plugins/discovery-azure.asciidoc b/docs/plugins/discovery-azure-classic.asciidoc similarity index 92% rename from docs/plugins/discovery-azure.asciidoc rename to docs/plugins/discovery-azure-classic.asciidoc index e3bdc83e470..d794adbea7e 100644 --- a/docs/plugins/discovery-azure.asciidoc +++ b/docs/plugins/discovery-azure-classic.asciidoc @@ -1,9 +1,13 @@ -[[discovery-azure]] -=== Azure Discovery Plugin +[[discovery-azure-classic]] +=== Azure Classic Discovery Plugin -The Azure Discovery plugin uses the Azure API for unicast discovery. +The Azure Classic Discovery plugin uses the Azure Classic API for unicast discovery. 
-[[discovery-azure-install]]
+// TODO: Link to ARM plugin when ready
+// See issue https://github.com/elastic/elasticsearch/issues/19146
+deprecated[5.0.0, Use the upcoming Azure ARM Discovery plugin instead]
+
+[[discovery-azure-classic-install]]
[float]
==== Installation
@@ -11,13 +15,13 @@ This plugin can be installed using the plugin manager:

[source,sh]
----------------------------------------------------------------
-sudo bin/elasticsearch-plugin install discovery-azure
+sudo bin/elasticsearch-plugin install discovery-azure-classic
----------------------------------------------------------------

The plugin must be installed on every node in the cluster, and each node must
be restarted after installation.

-[[discovery-azure-remove]]
+[[discovery-azure-classic-remove]]
[float]
==== Removal
@@ -25,12 +29,12 @@ The plugin can be removed with the following command:

[source,sh]
----------------------------------------------------------------
-sudo bin/elasticsearch-plugin remove discovery-azure
+sudo bin/elasticsearch-plugin remove discovery-azure-classic
----------------------------------------------------------------

The node must be stopped before removing the plugin.

-[[discovery-azure-usage]]
+[[discovery-azure-classic-usage]]
==== Azure Virtual Machine Discovery

Azure VM discovery allows to use the azure APIs to perform automatic discovery (similar to multicast in non hostile
@@ -56,13 +60,15 @@ discovery:
.Binding the network host
==============================================

+The keystore file must be placed in a directory accessible by Elasticsearch, such as the `config` directory.
+
It's important to define `network.host` as by default it's bound to `localhost`.

-You can use {ref}/modules-network.html[core network host settings]. For example `_non_loopback_` or `_en0_`.
+You can use {ref}/modules-network.html[core network host settings]. For example `_en0_`.

==============================================

-[[discovery-azure-short]]
+[[discovery-azure-classic-short]]
===== How to start (short story)

* Create Azure instances
@@ -71,7 +77,7 @@ You can use {ref}/modules-network.html[core network host settings]. For example
* Modify `elasticsearch.yml` file
* Start Elasticsearch

-[[discovery-azure-settings]]
+[[discovery-azure-classic-settings]]
===== Azure credential API settings

The following are a list of settings that can further control the credential API:
@@ -98,7 +104,7 @@ The following are a list of settings that can further control the credential API
    your_azure_cloud_service_name

-[[discovery-azure-settings-advanced]]
+[[discovery-azure-classic-settings-advanced]]
===== Advanced settings

The following are a list of settings that can further control the discovery:
@@ -141,7 +147,7 @@ discovery:
            slot: production
----

-[[discovery-azure-long]]
+[[discovery-azure-classic-long]]
==== Setup process for Azure Discovery

We will expose here one strategy which is to hide our Elasticsearch cluster from outside.
@@ -151,7 +157,7 @@ other.
That means that with this mode, you can use elasticsearch unicast discovery to build a cluster, using the Azure API
to retrieve information about your nodes.

-[[discovery-azure-long-prerequisites]]
+[[discovery-azure-classic-long-prerequisites]]
===== Prerequisites

Before starting, you need to have:
@@ -241,7 +247,7 @@ azure account download
azure account import /tmp/azure.publishsettings
----

-[[discovery-azure-long-instance]]
+[[discovery-azure-classic-long-instance]]
===== Creating your first instance

You need to have a storage account available.
Check http://www.windowsazure.com/en-us/develop/net/how-to-guides/blob-storage/#create-account[Azure Blob Storage documentation] @@ -394,7 +400,7 @@ This command should give you a JSON result: } ---- -[[discovery-azure-long-plugin]] +[[discovery-azure-classic-long-plugin]] ===== Install elasticsearch cloud azure plugin [source,sh] @@ -403,7 +409,7 @@ This command should give you a JSON result: sudo service elasticsearch stop # Install the plugin -sudo /usr/share/elasticsearch/bin/elasticsearch-plugin install discovery-azure +sudo /usr/share/elasticsearch/bin/elasticsearch-plugin install discovery-azure-classic # Configure it sudo vi /etc/elasticsearch/elasticsearch.yml @@ -439,7 +445,7 @@ sudo service elasticsearch start If anything goes wrong, check your logs in `/var/log/elasticsearch`. -[[discovery-azure-scale]] +[[discovery-azure-classic-scale]] ==== Scaling Out! You need first to create an image of your previous machine. diff --git a/docs/plugins/discovery-ec2.asciidoc b/docs/plugins/discovery-ec2.asciidoc index 17659d496b8..224080c522c 100644 --- a/docs/plugins/discovery-ec2.asciidoc +++ b/docs/plugins/discovery-ec2.asciidoc @@ -105,19 +105,18 @@ cloud: The `cloud.aws.region` can be set to a region and will automatically use the relevant settings for both `ec2` and `s3`. The available values are: -* `us-east` (`us-east-1`) -* `us-west` (`us-west-1`) -* `us-west-1` -* `us-west-2` -* `ap-southeast` (`ap-southeast-1`) -* `ap-southeast-1` -* `ap-southeast-2` -* `ap-northeast` (`ap-northeast-1`) -* `ap-northeast-2` (`ap-northeast-2`) -* `eu-west` (`eu-west-1`) -* `eu-central` (`eu-central-1`) -* `sa-east` (`sa-east-1`) -* `cn-north` (`cn-north-1`) +* `us-east` (`us-east-1`) for US East (N. Virginia) +* `us-west` (`us-west-1`) for US West (N. California) +* `us-west-2` for US West (Oregon) +* `ap-south-1` for Asia Pacific (Mumbai) +* `ap-southeast` (`ap-southeast-1`) for Asia Pacific (Singapore) +* `ap-southeast-2` for Asia Pacific (Sydney) +* `ap-northeast` (`ap-northeast-1`) for Asia Pacific (Tokyo) +* `ap-northeast-2` (`ap-northeast-2`) for Asia Pacific (Seoul) +* `eu-west` (`eu-west-1`) for EU (Ireland) +* `eu-central` (`eu-central-1`) for EU (Frankfurt) +* `sa-east` (`sa-east-1`) for South America (São Paulo) +* `cn-north` (`cn-north-1`) for China (Beijing) [[discovery-ec2-usage-signer]] ===== EC2 Signer API diff --git a/docs/plugins/discovery.asciidoc b/docs/plugins/discovery.asciidoc index 62c5b4551ac..999bf9c0e1f 100644 --- a/docs/plugins/discovery.asciidoc +++ b/docs/plugins/discovery.asciidoc @@ -13,9 +13,9 @@ The core discovery plugins are: The EC2 discovery plugin uses the https://github.com/aws/aws-sdk-java[AWS API] for unicast discovery. -<>:: +<>:: -The Azure discovery plugin uses the Azure API for unicast discovery. +The Azure Classic discovery plugin uses the Azure Classic API for unicast discovery. <>:: @@ -33,7 +33,7 @@ A number of discovery plugins have been contributed by our community: include::discovery-ec2.asciidoc[] -include::discovery-azure.asciidoc[] +include::discovery-azure-classic.asciidoc[] include::discovery-gce.asciidoc[] diff --git a/docs/plugins/ingest-useragent.asciidoc b/docs/plugins/ingest-useragent.asciidoc new file mode 100644 index 00000000000..8868a4cf711 --- /dev/null +++ b/docs/plugins/ingest-useragent.asciidoc @@ -0,0 +1,74 @@ +[[ingest-useragent]] +=== Ingest Useragent Processor Plugin + +The Useragent processor extracts details from the user agent string a browser sends with its web requests. 
+This processor adds this information by default under the `useragent` field.
+
+The ingest-useragent plugin ships by default with the regexes.yaml made available by uap-java with an Apache 2.0 license. For more details see https://github.com/ua-parser/uap-core.
+
+[[ingest-useragent-install]]
+[float]
+==== Installation
+
+This plugin can be installed using the plugin manager:
+
+[source,sh]
+----------------------------------------------------------------
+sudo bin/elasticsearch-plugin install ingest-useragent
+----------------------------------------------------------------
+
+The plugin must be installed on every node in the cluster, and each node must
+be restarted after installation.
+
+[[ingest-useragent-remove]]
+[float]
+==== Removal
+
+The plugin can be removed with the following command:
+
+[source,sh]
+----------------------------------------------------------------
+sudo bin/elasticsearch-plugin remove ingest-useragent
+----------------------------------------------------------------
+
+The node must be stopped before removing the plugin.
+
+[[using-ingest-useragent]]
+==== Using the Useragent Processor in a Pipeline
+
+[[ingest-useragent-options]]
+.Useragent options
+[options="header"]
+|======
+| Name | Required | Default | Description
+| `field` | yes | - | The field containing the user agent string.
+| `target_field` | no | useragent | The field that will be filled with the user agent details.
+| `regex_file` | no | - | The name of the file in the `config/ingest-useragent` directory containing the regular expressions for parsing the user agent string. Both the directory and the file have to be created before starting Elasticsearch. If not specified, ingest-useragent will use the regexes.yaml from uap-core it ships with (see below).
+| `properties` | no | [`name`, `major`, `minor`, `patch`, `build`, `os`, `os_name`, `os_major`, `os_minor`, `device`] | Controls what properties are added to `target_field`.
+|======
+
+Here is an example that adds the user agent details to the `useragent` field based on the `agent` field:
+
+[source,js]
+--------------------------------------------------
+{
+  "description" : "...",
+  "processors" : [
+    {
+      "useragent" : {
+        "field" : "agent"
+      }
+    }
+  ]
+}
+--------------------------------------------------
+
+===== Using a custom regex file
+To use a custom regex file for parsing the user agents, that file has to be put into the `config/ingest-useragent` directory and
+has to have a `.yaml` filename extension. The file has to be present at node startup; any changes to it or any new files added
+while the node is running will not have any effect.
+
+In practice, it will make most sense for any custom regex file to be a variant of the default file, either a more recent version
+or a customised version.
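+
+For example, a pipeline processor configured against a hypothetical custom file
+`config/ingest-useragent/custom-regexes.yaml` would reference it by name (the
+filename here is illustrative):
+
+[source,js]
+--------------------------------------------------
+{
+  "useragent" : {
+    "field" : "agent",
+    "regex_file" : "custom-regexes.yaml"
+  }
+}
+--------------------------------------------------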
+ +The default file included in `ingest-useragent` is the `regexes.yaml` from uap-core: https://github.com/ua-parser/uap-core/blob/master/regexes.yaml diff --git a/docs/plugins/mapper-size.asciidoc b/docs/plugins/mapper-size.asciidoc index 800a640890a..df16d7eb857 100644 --- a/docs/plugins/mapper-size.asciidoc +++ b/docs/plugins/mapper-size.asciidoc @@ -52,7 +52,8 @@ PUT my_index -------------------------- // CONSOLE -The value of the `_size` field is accessible in queries: +The value of the `_size` field is accessible in queries, aggregations, scripts, +and when sorting: [source,js] -------------------------- @@ -75,6 +76,26 @@ GET my_index/_search "gt": 10 } } + }, + "aggs": { + "sizes": { + "terms": { + "field": "_size", <2> + "size": 10 + } + } + }, + "sort": [ + { + "_size": { <3> + "order": "desc" + } + } + ], + "script_fields": { + "size": { + "script": "doc['_size']" <4> + } } } -------------------------- @@ -82,3 +103,7 @@ GET my_index/_search // TEST[continued] <1> Querying on the `_size` field +<2> Aggregating on the `_size` field +<3> Sorting on the `_size` field +<4> Accessing the `_size` field in scripts (inline scripts must be modules-security-scripting.html#enable-dynamic-scripting[enabled] for this example to work) + diff --git a/docs/plugins/plugin-script.asciidoc b/docs/plugins/plugin-script.asciidoc index 1e21288e39c..987cc7c9758 100644 --- a/docs/plugins/plugin-script.asciidoc +++ b/docs/plugins/plugin-script.asciidoc @@ -51,7 +51,7 @@ sudo bin/elasticsearch-plugin install analysis-icu ----------------------------------- This command will install the version of the plugin that matches your -Elasticsearch version. +Elasticsearch version and also show a progress bar while downloading. [float] === Custom URL or file system @@ -117,8 +117,8 @@ The `plugin` scripts supports a number of other command line parameters: === Silent/Verbose mode The `--verbose` parameter outputs more debug information, while the `--silent` -parameter turns off all output. The script may return the following exit -codes: +parameter turns off all output including the progress bar. The script may +return the following exit codes: [horizontal] `0`:: everything was OK diff --git a/docs/plugins/redirects.asciidoc b/docs/plugins/redirects.asciidoc index c8cf10c6319..0f9c0b40f2c 100644 --- a/docs/plugins/redirects.asciidoc +++ b/docs/plugins/redirects.asciidoc @@ -24,7 +24,7 @@ The `cloud-aws` plugin has been split into two separate plugins: The `cloud-azure` plugin has been split into two separate plugins: -* <> (`discovery-azure`) +* <> (`discovery-azure-classic`) * <> (`repository-azure`) diff --git a/docs/plugins/repository-s3.asciidoc b/docs/plugins/repository-s3.asciidoc index 63e3ad311b2..e1f07f6c8e2 100644 --- a/docs/plugins/repository-s3.asciidoc +++ b/docs/plugins/repository-s3.asciidoc @@ -110,19 +110,18 @@ The `cloud.aws.region` can be set to a region and will automatically use the rel You can specifically set it for s3 only using `cloud.aws.s3.region`. The available values are: -* `us-east` (`us-east-1`) -* `us-west` (`us-west-1`) -* `us-west-1` -* `us-west-2` -* `ap-southeast` (`ap-southeast-1`) -* `ap-southeast-1` -* `ap-southeast-2` -* `ap-northeast` (`ap-northeast-1`) -* `ap-northeast-2` (`ap-northeast-2`) -* `eu-west` (`eu-west-1`) -* `eu-central` (`eu-central-1`) -* `sa-east` (`sa-east-1`) -* `cn-north` (`cn-north-1`) +* `us-east` (`us-east-1`) for US East (N. Virginia) +* `us-west` (`us-west-1`) for US West (N. 
California) +* `us-west-2` for US West (Oregon) +* `ap-south-1` for Asia Pacific (Mumbai) +* `ap-southeast` (`ap-southeast-1`) for Asia Pacific (Singapore) +* `ap-southeast-2` for Asia Pacific (Sydney) +* `ap-northeast` (`ap-northeast-1`) for Asia Pacific (Tokyo) +* `ap-northeast-2` (`ap-northeast-2`) for Asia Pacific (Seoul) +* `eu-west` (`eu-west-1`) for EU (Ireland) +* `eu-central` (`eu-central-1`) for EU (Frankfurt) +* `sa-east` (`sa-east-1`) for South America (São Paulo) +* `cn-north` (`cn-north-1`) for China (Beijing) [[repository-s3-usage-signer]] ===== S3 Signer API diff --git a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc index 29ba5e49626..4d9147067ec 100644 --- a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc @@ -26,8 +26,9 @@ Requesting bucket intervals of a month. Available expressions for interval: `year`, `quarter`, `month`, `week`, `day`, `hour`, `minute`, `second` - -Fractional values are allowed for seconds, minutes, hours, days and weeks. For example 1.5 hours: +Time values can also be specified via abbreviations supported by <> parsing. +Note that fractional time values are not supported, but you can address this by shifting to another +time unit (e.g., `1.5h` could instead be specified as `90m`). [source,js] -------------------------------------------------- @@ -36,15 +37,13 @@ Fractional values are allowed for seconds, minutes, hours, days and weeks. For e "articles_over_time" : { "date_histogram" : { "field" : "date", - "interval" : "1.5h" + "interval" : "90m" } } } } -------------------------------------------------- -See <> for accepted abbreviations. - ==== Keys Internally, a date is represented as a 64 bit number representing a timestamp diff --git a/docs/reference/aggregations/bucket/diversified-sampler-aggregation.asciidoc b/docs/reference/aggregations/bucket/diversified-sampler-aggregation.asciidoc index 1a3180962b0..70412d2680a 100644 --- a/docs/reference/aggregations/bucket/diversified-sampler-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/diversified-sampler-aggregation.asciidoc @@ -115,8 +115,11 @@ Controlling diversity using a script: { "aggs" : { "sample" : { - "diverisfied_sampler" : { - "script" : "doc['author'].value + '/' + doc['genre'].value" + "diversified_sampler" : { + "script" : { + "lang" : "painless", + "inline" : "doc['author'].value + '/' + doc['genre'].value" + } } } } @@ -150,5 +153,7 @@ In this situation an error will be thrown. The de-duplication logic in the diversify settings applies only at a shard level so will not apply across shards. ===== No specialized syntax for geo/date fields -Currently the syntax for defining the diversifying values is defined by a choice of `field` or `script` - there is no added syntactical sugar for expressing geo or date units such as "1w" (1 week). -This support may be added in a later release and users will currently have to create these sorts of values using a script. \ No newline at end of file +Currently the syntax for defining the diversifying values is defined by a choice of `field` or +`script` - there is no added syntactical sugar for expressing geo or date units such as "7d" (7 +days). This support may be added in a later release and users will currently have to create these +sorts of values using a script. 
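+
+A sketch of that script-based workaround, assuming a `timestamp` field that
+holds epoch milliseconds as a `long`, diversifying on 7-day periods
+(604800000 milliseconds):
+
+[source,js]
+--------------------------------------------------
+{
+    "aggs" : {
+        "sample" : {
+            "diversified_sampler" : {
+                "script" : {
+                    "lang" : "painless",
+                    "inline" : "doc['timestamp'].value / 604800000"
+                }
+            }
+        }
+    }
+}
+--------------------------------------------------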
diff --git a/docs/reference/aggregations/bucket/range-aggregation.asciidoc b/docs/reference/aggregations/bucket/range-aggregation.asciidoc index 44760fc728f..efcbd9715e1 100644 --- a/docs/reference/aggregations/bucket/range-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/range-aggregation.asciidoc @@ -134,7 +134,10 @@ It is also possible to customize the key for each range: "aggs" : { "price_ranges" : { "range" : { - "script" : "doc['price'].value", + "script" : { + "lang": "painless", + "inline": "doc['price'].value" + }, "ranges" : [ { "to" : 50 }, { "from" : 50, "to" : 100 }, @@ -146,7 +149,7 @@ It is also possible to customize the key for each range: } -------------------------------------------------- -This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: +This will interpret the `script` parameter as an `inline` script with the `painless` script language and no script parameters. To use a file script use the following syntax: [source,js] -------------------------------------------------- @@ -184,9 +187,12 @@ Lets say the product prices are in USD but we would like to get the price ranges "price_ranges" : { "range" : { "field" : "price", - "script" : "_value * conversion_rate", - "params" : { - "conversion_rate" : 0.8 + "script" : { + "lang": "painless", + "inline": "_value * params.conversion_rate", + "params" : { + "conversion_rate" : 0.8 + } }, "ranges" : [ { "to" : 35 }, diff --git a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc index 3c1f4ae860a..959b93611d8 100644 --- a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc @@ -9,8 +9,8 @@ Example: -------------------------------------------------- { "aggs" : { - "genders" : { - "terms" : { "field" : "gender" } + "genres" : { + "terms" : { "field" : "genre" } } } } @@ -24,16 +24,20 @@ Response: ... 
"aggregations" : { - "genders" : { + "genres" : { "doc_count_error_upper_bound": 0, <1> "sum_other_doc_count": 0, <2> "buckets" : [ <3> { - "key" : "male", + "key" : "jazz", "doc_count" : 10 }, { - "key" : "female", + "key" : "rock", + "doc_count" : 10 + }, + { + "key" : "electronic", "doc_count" : 10 }, ] @@ -247,9 +251,9 @@ Ordering the buckets by their `doc_count` in an ascending manner: -------------------------------------------------- { "aggs" : { - "genders" : { + "genres" : { "terms" : { - "field" : "gender", + "field" : "genre", "order" : { "_count" : "asc" } } } @@ -263,9 +267,9 @@ Ordering the buckets alphabetically by their terms in an ascending manner: -------------------------------------------------- { "aggs" : { - "genders" : { + "genres" : { "terms" : { - "field" : "gender", + "field" : "genre", "order" : { "_term" : "asc" } } } @@ -280,13 +284,13 @@ Ordering the buckets by single value metrics sub-aggregation (identified by the -------------------------------------------------- { "aggs" : { - "genders" : { + "genres" : { "terms" : { - "field" : "gender", - "order" : { "avg_height" : "desc" } + "field" : "genre", + "order" : { "avg_play_count" : "desc" } }, "aggs" : { - "avg_height" : { "avg" : { "field" : "height" } } + "avg_play_count" : { "avg" : { "field" : "play_count" } } } } } @@ -299,13 +303,13 @@ Ordering the buckets by multi value metrics sub-aggregation (identified by the a -------------------------------------------------- { "aggs" : { - "genders" : { + "genres" : { "terms" : { - "field" : "gender", - "order" : { "height_stats.avg" : "desc" } + "field" : "genre", + "order" : { "playback_stats.avg" : "desc" } }, "aggs" : { - "height_stats" : { "stats" : { "field" : "height" } } + "playback_stats" : { "stats" : { "field" : "play_count" } } } } } @@ -343,14 +347,14 @@ PATH := []*[height_stats.avg" : "desc" } + "field" : "artist.country", + "order" : { "rock>playback_stats.avg" : "desc" } }, "aggs" : { - "females" : { - "filter" : { "term" : { "gender" : "female" }}, + "rock" : { + "filter" : { "term" : { "genre" : "rock" }}, "aggs" : { - "height_stats" : { "stats" : { "field" : "height" }} + "playback_stats" : { "stats" : { "field" : "play_count" }} } } } @@ -359,7 +363,7 @@ PATH := []*[height_stats.avg" : "desc" }, { "_count" : "desc" } ] + "field" : "artist.country", + "order" : [ { "rock>playback_stats.avg" : "desc" }, { "_count" : "desc" } ] }, "aggs" : { - "females" : { - "filter" : { "term" : { "gender" : { "female" }}}, + "rock" : { + "filter" : { "term" : { "genre" : { "rock" }}}, "aggs" : { - "height_stats" : { "stats" : { "field" : "height" }} + "playback_stats" : { "stats" : { "field" : "play_count" }} } } } @@ -385,7 +389,7 @@ Multiple criteria can be used to order the buckets by providing an array of orde } -------------------------------------------------- -The above will sort the countries buckets based on the average height among the female population and then by +The above will sort the artist's countries buckets based on the average play count among the rock songs and then by their `doc_count` in descending order. 
NOTE: In the event that two buckets share the same values for all order criteria the bucket's term value is used as a
@@ -439,10 +443,10 @@ Generating the terms using a script:
--------------------------------------------------
{
    "aggs" : {
-        "genders" : {
+        "genres" : {
            "terms" : {
                "script" : {
-                    "inline": "doc['gender'].value"
+                    "inline": "doc['genre'].value",
                    "lang": "painless"
                }
            }
        }
    }
}
--------------------------------------------------
@@ -457,12 +461,12 @@ This will interpret the `script` parameter as an `inline` script with the defaul
--------------------------------------------------
{
    "aggs" : {
-        "genders" : {
+        "genres" : {
            "terms" : {
                "script" : {
                    "file": "my_script",
                    "params": {
-                        "field": "gender"
+                        "field": "genre"
                    }
                }
            }
        }
    }
}
--------------------------------------------------
@@ -480,11 +484,11 @@ TIP: for indexed scripts replace the `file` parameter with an `id` parameter.
--------------------------------------------------
{
    "aggs" : {
-        "genders" : {
+        "genres" : {
            "terms" : {
-                "field" : "gender",
+                "field" : "genre",
                "script" : {
-                    "inline" : "'Gender: ' +_value"
+                    "inline" : "'Genre: ' + _value",
                    "lang" : "painless"
                }
            }
diff --git a/docs/reference/aggregations/metrics/avg-aggregation.asciidoc b/docs/reference/aggregations/metrics/avg-aggregation.asciidoc
index f81cd3eee33..9967f4c9d9e 100644
--- a/docs/reference/aggregations/metrics/avg-aggregation.asciidoc
+++ b/docs/reference/aggregations/metrics/avg-aggregation.asciidoc
@@ -42,12 +42,19 @@ Computing the average grade based on a script:
    ...,

    "aggs" : {
-        "avg_grade" : { "avg" : { "script" : "doc['grade'].value" } }
+        "avg_grade" : {
+            "avg" : {
+                "script" : {
+                    "inline" : "doc['grade'].value",
+                    "lang" : "painless"
+                }
+            }
+        }
    }
}
--------------------------------------------------
-This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax:
+This will interpret the `script` parameter as an `inline` script with the `painless` script language and no script parameters. To use a file script use the following syntax:

[source,js]
--------------------------------------------------
@@ -86,7 +93,8 @@ It turned out that the exam was way above the level of the students and a grade
        "avg" : {
            "field" : "grade",
            "script" : {
-                "inline": "_value * correction",
+                "lang": "painless",
+                "inline": "_value * params.correction",
                "params" : {
                    "correction" : 1.2
                }
diff --git a/docs/reference/aggregations/metrics/cardinality-aggregation.asciidoc b/docs/reference/aggregations/metrics/cardinality-aggregation.asciidoc
index 1eb0c08772f..9c4ee59cccf 100644
--- a/docs/reference/aggregations/metrics/cardinality-aggregation.asciidoc
+++ b/docs/reference/aggregations/metrics/cardinality-aggregation.asciidoc
@@ -104,14 +104,17 @@ however since hashes need to be computed on the fly.
    "aggs" : {
        "author_count" : {
            "cardinality" : {
-                "script": "doc['author.first_name'].value + ' ' + doc['author.last_name'].value"
+                "script": {
+                    "lang": "painless",
+                    "inline": "doc['author.first_name'].value + ' ' + doc['author.last_name'].value"
+                }
            }
        }
    }
}
--------------------------------------------------
-This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters.
+This will interpret the `script` parameter as an `inline` script with the `painless` script language and no script parameters.
To use a file script use the following syntax: [source,js] -------------------------------------------------- diff --git a/docs/reference/aggregations/metrics/extendedstats-aggregation.asciidoc b/docs/reference/aggregations/metrics/extendedstats-aggregation.asciidoc index 30a5acf6809..0e324089dc7 100644 --- a/docs/reference/aggregations/metrics/extendedstats-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/extendedstats-aggregation.asciidoc @@ -86,12 +86,19 @@ Computing the grades stats based on a script: ..., "aggs" : { - "grades_stats" : { "extended_stats" : { "script" : "doc['grade'].value" } } + "grades_stats" : { + "extended_stats" : { + "script" : { + "inline" : "doc['grade'].value", + "lang" : "painless" + } + } + } } } -------------------------------------------------- -This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: +This will interpret the `script` parameter as an `inline` script with the `painless` script language and no script parameters. To use a file script use the following syntax: [source,js] -------------------------------------------------- @@ -130,7 +137,8 @@ It turned out that the exam was way above the level of the students and a grade "extended_stats" : { "field" : "grade", "script" : { - "inline": "_value * correction", + "lang" : "painless", + "inline": "_value * params.correction", "params" : { "correction" : 1.2 } diff --git a/docs/reference/aggregations/metrics/max-aggregation.asciidoc b/docs/reference/aggregations/metrics/max-aggregation.asciidoc index 2a641fda5dc..8cfc0bd998e 100644 --- a/docs/reference/aggregations/metrics/max-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/max-aggregation.asciidoc @@ -39,12 +39,19 @@ Computing the max price value across all document, this time using a script: -------------------------------------------------- { "aggs" : { - "max_price" : { "max" : { "script" : "doc['price'].value" } } + "max_price" : { + "max" : { + "script" : { + "inline" : "doc['price'].value", + "lang" : "painless" + } + } + } } } -------------------------------------------------- -This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: +This will interpret the `script` parameter as an `inline` script with the `painless` script language and no script parameters. 
To use a file script use the following syntax: [source,js] -------------------------------------------------- @@ -78,7 +85,8 @@ Let's say that the prices of the documents in our index are in USD, but we would "max" : { "field" : "price", "script" : { - "inline": "_value * conversion_rate", + "lang": "painless", + "inline": "_value * params.conversion_rate", "params" : { "conversion_rate" : 1.2 } diff --git a/docs/reference/aggregations/metrics/min-aggregation.asciidoc b/docs/reference/aggregations/metrics/min-aggregation.asciidoc index 7698a41202c..819d70343fb 100644 --- a/docs/reference/aggregations/metrics/min-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/min-aggregation.asciidoc @@ -39,12 +39,19 @@ Computing the min price value across all document, this time using a script: -------------------------------------------------- { "aggs" : { - "min_price" : { "min" : { "script" : "doc['price'].value" } } + "min_price" : { + "min" : { + "script" : { + "inline" : "doc['price'].value", + "lang" : "painless" + } + } + } } } -------------------------------------------------- -This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: +This will interpret the `script` parameter as an `inline` script with the `painless` script language and no script parameters. To use a file script use the following syntax: [source,js] -------------------------------------------------- @@ -78,7 +85,8 @@ Let's say that the prices of the documents in our index are in USD, but we would "min" : { "field" : "price", "script" : - "inline": "_value * conversion_rate", + "lang" : "painless", + "inline": "_value * params.conversion_rate", "params" : { "conversion_rate" : 1.2 } diff --git a/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc b/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc index 5357d00461e..dc8c8837344 100644 --- a/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc @@ -101,7 +101,8 @@ a script to convert them on-the-fly: "load_time_outlier" : { "percentiles" : { "script" : { - "inline": "doc['load_time'].value / timeUnit", <1> + "lang": "painless", + "inline": "doc['load_time'].value / params.timeUnit", <1> "params" : { "timeUnit" : 1000 <2> } @@ -115,7 +116,7 @@ a script to convert them on-the-fly: script to generate values which percentiles are calculated on <2> Scripting supports parameterized input just like any other script -This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: +This will interpret the `script` parameter as an `inline` script with the `painless` script language and no script parameters. 
To use a file script use the following syntax: [source,js] -------------------------------------------------- diff --git a/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc b/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc index 759abb22be5..dcb953ae252 100644 --- a/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc @@ -73,7 +73,8 @@ a script to convert them on-the-fly: "percentile_ranks" : { "values" : [3, 5], "script" : { - "inline": "doc['load_time'].value / timeUnit", <1> + "lang": "painless", + "inline": "doc['load_time'].value / params.timeUnit", <1> "params" : { "timeUnit" : 1000 <2> } @@ -87,7 +88,7 @@ a script to convert them on-the-fly: script to generate values which percentile ranks are calculated on <2> Scripting supports parameterized input just like any other script -This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: +This will interpret the `script` parameter as an `inline` script with the `painless` script language and no script parameters. To use a file script use the following syntax: [source,js] -------------------------------------------------- diff --git a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc index 639fabb62fd..ba9899f9d68 100644 --- a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc @@ -101,7 +101,7 @@ In the above example, the `init_script` creates an array `transactions` in the ` map_script:: Executed once per document collected. This is the only required script. If no combine_script is specified, the resulting state needs to be stored in an object named `_agg`. + -In the above example, the `map_script` checks the value of the type field. If the value if 'sale' the value of the amount field +In the above example, the `map_script` checks the value of the type field. If the value is 'sale' the value of the amount field is added to the transactions array. If the value of the type field is not 'sale' the negated value of the amount field is added to transactions. diff --git a/docs/reference/aggregations/metrics/stats-aggregation.asciidoc b/docs/reference/aggregations/metrics/stats-aggregation.asciidoc index 852c1c3f7a9..a442fb12150 100644 --- a/docs/reference/aggregations/metrics/stats-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/stats-aggregation.asciidoc @@ -48,12 +48,19 @@ Computing the grades stats based on a script: ..., "aggs" : { - "grades_stats" : { "stats" : { "script" : "doc['grade'].value" } } + "grades_stats" : { + "stats" : { + "script" : { + "lang": "painless", + "inline": "doc['grade'].value" + } + } + } } } -------------------------------------------------- -This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: +This will interpret the `script` parameter as an `inline` script with the `painless` script language and no script parameters. 
To use a file script use the following syntax: [source,js] -------------------------------------------------- @@ -92,7 +99,8 @@ It turned out that the exam was way above the level of the students and a grade "stats" : { "field" : "grade", "script" : - "inline": "_value * correction", + "lang": "painless", + "inline": "_value * params.correction", "params" : { "correction" : 1.2 } diff --git a/docs/reference/aggregations/metrics/sum-aggregation.asciidoc b/docs/reference/aggregations/metrics/sum-aggregation.asciidoc index d55fcd01018..b9aee74b320 100644 --- a/docs/reference/aggregations/metrics/sum-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/sum-aggregation.asciidoc @@ -49,12 +49,19 @@ Computing the intraday return based on a script: ..., "aggs" : { - "intraday_return" : { "sum" : { "script" : "doc['change'].value" } } + "intraday_return" : { + "sum" : { + "script" : { + "lang": "painless", + "inline": "doc['change'].value" + } + } + } } } -------------------------------------------------- -This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: +This will interpret the `script` parameter as an `inline` script with the `painless` script language and no script parameters. To use a file script use the following syntax: [source,js] -------------------------------------------------- @@ -92,7 +99,10 @@ Computing the sum of squares over all stock tick changes: "daytime_return" : { "sum" : { "field" : "change", - "script" : "_value * _value" + "script" : { + "lang": "painless", + "inline": "_value * _value" + } } } } diff --git a/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc b/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc index 2bad60ba0ec..1b955d2a898 100644 --- a/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc @@ -182,7 +182,10 @@ relevancy order of the most relevant document in a bucket. }, "top_hit" : { "max": { - "script": "_score" + "script": { + "lang": "painless", + "inline": "_score" + } } } } diff --git a/docs/reference/aggregations/metrics/valuecount-aggregation.asciidoc b/docs/reference/aggregations/metrics/valuecount-aggregation.asciidoc index fa2bfdbbb9d..925f5426187 100644 --- a/docs/reference/aggregations/metrics/valuecount-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/valuecount-aggregation.asciidoc @@ -43,12 +43,19 @@ Counting the values generated by a script: ..., "aggs" : { - "grades_count" : { "value_count" : { "script" : "doc['grade'].value" } } + "grades_count" : { + "value_count" : { + "script" : { + "inline" : "doc['grade'].value", + "lang" : "painless" + } + } + } } } -------------------------------------------------- -This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax: +This will interpret the `script` parameter as an `inline` script with the `painless` script language and no script parameters. 
To use a file script use the following syntax: [source,js] -------------------------------------------------- diff --git a/docs/reference/api-conventions.asciidoc b/docs/reference/api-conventions.asciidoc index 4e487325829..26c1a7476ed 100644 --- a/docs/reference/api-conventions.asciidoc +++ b/docs/reference/api-conventions.asciidoc @@ -171,8 +171,18 @@ one or more maths expressions: * `-1d` - subtract one day * `/d` - round down to the nearest day -The supported <> are: `y` (year), `M` (month), `w` (week), -`d` (day), `h` (hour), `m` (minute), and `s` (second). +The supported time units differ from those supported by <> for durations. +The supported units are: + +[horizontal] +`y`:: years +`M`:: months +`w`:: weeks +`d`:: days +`h`:: hours +`H`:: hours +`m`:: minutes +`s`:: seconds Some examples are: @@ -348,21 +358,17 @@ of supporting the native JSON number types. [float] === Time units -Whenever durations need to be specified, eg for a `timeout` parameter, the -duration must specify the unit, like `2d` for 2 days. The supported units -are: +Whenever durations need to be specified, e.g. for a `timeout` parameter, the duration must specify +the unit, like `2d` for 2 days. The supported units are: [horizontal] -`y`:: Year -`M`:: Month -`w`:: Week -`d`:: Day -`h`:: Hour -`m`:: Minute -`s`:: Second -`ms`:: Milli-second -`micros`:: Micro-second -`nanos`:: Nano-second +`d`:: days +`h`:: hours +`m`:: minutes +`s`:: seconds +`ms`:: milliseconds +`micros`:: microseconds +`nanos`:: nanoseconds [[byte-units]] [float] diff --git a/docs/reference/cat/plugins.asciidoc b/docs/reference/cat/plugins.asciidoc index 81df5cfb127..0af1faa5c9c 100644 --- a/docs/reference/cat/plugins.asciidoc +++ b/docs/reference/cat/plugins.asciidoc @@ -6,14 +6,9 @@ The `plugins` command provides a view per node of running plugins. This informat [source,sh] ------------------------------------------------------------------------------ % curl 'localhost:9200/_cat/plugins?v' -name component version type isolation url -Abraxas discovery-azure 2.1.0-SNAPSHOT j x -Abraxas lang-javascript 2.0.0-SNAPSHOT j x -Abraxas marvel NA j/s x /_plugin/marvel/ -Abraxas lang-python 2.0.0-SNAPSHOT j x -Abraxas inquisitor NA s /_plugin/inquisitor/ -Abraxas kopf 0.5.2 s /_plugin/kopf/ -Abraxas segmentspy NA s /_plugin/segmentspy/ +name component version description +Abraxas discovery-gce 5.0.0 The Google Compute Engine (GCE) Discovery plugin allows to use GCE API for the unicast discovery mechanism. +Abraxas lang-javascript 5.0.0 The JavaScript language plugin allows to have javascript as the language of scripts to execute. ------------------------------------------------------------------------------- We can tell quickly how many plugins per node we have and which versions. diff --git a/docs/reference/docs/bulk.asciidoc b/docs/reference/docs/bulk.asciidoc index 1fbd0419e2e..c9189b57c01 100644 --- a/docs/reference/docs/bulk.asciidoc +++ b/docs/reference/docs/bulk.asciidoc @@ -168,7 +168,7 @@ the options.
Curl example with update actions: { "update" : {"_id" : "1", "_type" : "type1", "_index" : "index1", "_retry_on_conflict" : 3} } { "doc" : {"field" : "value"} } { "update" : { "_id" : "0", "_type" : "type1", "_index" : "index1", "_retry_on_conflict" : 3} } -{ "script" : { "inline": "ctx._source.counter += param1", "lang" : "javascript", "params" : {"param1" : 1}}, "upsert" : {"counter" : 1}} +{ "script" : { "inline": "ctx._source.counter += params.param1", "lang" : "painless", "params" : {"param1" : 1}}, "upsert" : {"counter" : 1}} { "update" : {"_id" : "2", "_type" : "type1", "_index" : "index1", "_retry_on_conflict" : 3} } { "doc" : {"field" : "value"}, "doc_as_upsert" : true } { "update" : {"_id" : "3", "_type" : "type1", "_index" : "index1", "fields" : ["_source"]} } diff --git a/docs/reference/docs/delete-by-query.asciidoc b/docs/reference/docs/delete-by-query.asciidoc index 8b37e0a1220..b7f31ce4789 100644 --- a/docs/reference/docs/delete-by-query.asciidoc +++ b/docs/reference/docs/delete-by-query.asciidoc @@ -54,6 +54,10 @@ conflict if the document changes between the time when the snapshot was taken and when the delete request is processed. When the versions match the document is deleted. +NOTE: Since `internal` versioning does not support the value 0 as a valid +version number, documents with version equal to zero cannot be deleted using +`_delete_by_query` and will fail the request. + During the `_delete_by_query` execution, multiple search requests are sequentially executed in order to find all the matching documents to delete. Every time a batch of documents is found, a corresponding bulk request is executed to delete all diff --git a/docs/reference/docs/get.asciidoc b/docs/reference/docs/get.asciidoc index d63ddb52f4b..1d6544cdd92 100644 --- a/docs/reference/docs/get.asciidoc +++ b/docs/reference/docs/get.asciidoc @@ -146,7 +146,8 @@ You can also use the same source filtering parameters to control which parts of curl -XGET 'http://localhost:9200/twitter/tweet/1/_source?_source_include=*.id&_source_exclude=entities' -------------------------------------------------- -Note, there is also a HEAD variant for the _source endpoint to efficiently test for document existence. +Note, there is also a HEAD variant for the _source endpoint to efficiently test for document _source existence. +An existing document will not have a _source if it is disabled in the <>. Curl example: [source,js] diff --git a/docs/reference/docs/index_.asciidoc b/docs/reference/docs/index_.asciidoc index 6ed032db295..375857e0ba1 100644 --- a/docs/reference/docs/index_.asciidoc +++ b/docs/reference/docs/index_.asciidoc @@ -121,6 +121,14 @@ indexed and the new version number used. If the value provided is less than or equal to the stored document's version number, a version conflict will occur and the index operation will fail. +WARNING: External versioning supports the value 0 as a valid version number. +This allows the version to be in sync with an external versioning system +where version numbers start from zero instead of one. It has the side effect +that documents with version number equal to zero can neither be updated +using the <> nor be deleted +using the <> as long as their +version number is equal to zero. + A nice side effect is that there is no need to maintain strict ordering of async indexing operations executed as a result of changes to a source database, as long as version numbers from the source database are used.
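+ +For illustration only, here is a minimal sketch (index, type, and id are hypothetical) of indexing with `version_type` set to `external` and a version of zero, something `internal` versioning would reject: + +[source,js] +-------------------------------------------------- +PUT twitter/tweet/1?version=0&version_type=external +{ + "message" : "first revision, numbered from zero by the external system" +} +--------------------------------------------------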
diff --git a/docs/reference/docs/refresh.asciidoc b/docs/reference/docs/refresh.asciidoc index dd829e19bc3..90c5d4e3afe 100644 --- a/docs/reference/docs/refresh.asciidoc +++ b/docs/reference/docs/refresh.asciidoc @@ -17,11 +17,12 @@ indexing and a search standpoint. Wait for the changes made by the request to be made visible by a refresh before replying. This doesn't force an immediate refresh, rather, it waits for a -refresh happen. Elasticsearch automatically refreshes shards that have changed +refresh to happen. Elasticsearch automatically refreshes shards that have changed every `index.refresh_interval` which defaults to one second. That setting is -<>. The <> API will also -cause the request to return, as will setting `refresh` to `true` on any of the -APIs that support it. +<>. Calling the <> API or +setting `refresh` to `true` on any of the APIs that support it will also +cause a refresh, in turn causing already running requests with `refresh=wait_for` +to return. `false` (the default):: @@ -36,7 +37,7 @@ use `refresh=false`, or, because that is the default, just leave the `refresh` parameter out of the URL. That is the simplest and fastest choice. If you absolutely must have the changes made by a request visible synchronously -with the request then you must get to pick between putting more load on +with the request then you must pick between putting more load on Elasticsearch (`true`) and waiting longer for the response (`wait_for`). Here are a few points that should inform that decision: @@ -97,7 +98,7 @@ search: -------------------------------------------------- PUT /test/test/3 {"test": "test"} -PUT /test/test/4?refresh=true +PUT /test/test/4?refresh=false {"test": "test"} -------------------------------------------------- // CONSOLE diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc index 525552ae85a..2afc8e86bb1 100644 --- a/docs/reference/docs/reindex.asciidoc +++ b/docs/reference/docs/reindex.asciidoc @@ -238,7 +238,8 @@ POST _reindex "version_type": "external" }, "script": { - "inline": "if (ctx._source.foo == 'bar') {ctx._version++; ctx._source.remove('foo')}" + "inline": "if (ctx._source.foo == 'bar') {ctx._version++; ctx._source.remove('foo')}", + "lang": "painless" } } -------------------------------------------------- @@ -360,6 +361,60 @@ POST _reindex // CONSOLE // TEST[s/^/PUT source\nGET _cluster\/health?wait_for_status=yellow\n/] +[float] +=== Reindex from Remote + +Reindex supports reindexing from a remote Elasticsearch cluster: + +[source,js] +-------------------------------------------------- +POST _reindex +{ + "source": { + "remote": { + "host": "http://otherhost:9200", + "username": "user", + "password": "pass" + }, + "index": "source", + "query": { + "match": { + "test": "data" + } + } + }, + "dest": { + "index": "dest" + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:host] +// TEST[s/^/PUT source\nGET _cluster\/health?wait_for_status=yellow\n/] +// TEST[s/otherhost:9200",/\${host}"/] +// TEST[s/"username": "user",//] +// TEST[s/"password": "pass"//] + +The `host` parameter must contain a scheme, host, and port (e.g. +`https://otherhost:9200`). The `username` and `password` parameters are +optional, and when they are present reindex will connect to the remote +Elasticsearch node using basic auth. Be sure to use `https` when using +basic auth or the password will be sent in plain text.
+ +Remote hosts have to be explicitly whitelisted in elasticsearch.yml using the +`reindex.remote.whitelist` property. It can be set to a comma-delimited list +of allowed remote `host` and `port` combinations (e.g. +`otherhost:9200, another:9200`). Scheme is ignored by the whitelist - only host +and port are used. + +This feature should work with remote clusters of any version of Elasticsearch +you are likely to find. This should allow you to upgrade from any version of +Elasticsearch to the current version by reindexing from a cluster of the old +version. + +To enable queries sent to older versions of Elasticsearch, the `query` parameter +is sent directly to the remote host without validation or modification. + [float] === URL Parameters diff --git a/docs/reference/docs/update-by-query.asciidoc b/docs/reference/docs/update-by-query.asciidoc index 6d6bfe64ecf..06c20bcf07e 100644 --- a/docs/reference/docs/update-by-query.asciidoc +++ b/docs/reference/docs/update-by-query.asciidoc @@ -46,6 +46,10 @@ conflict if the document changes between the time when the snapshot was taken and when the index request is processed. When the versions match the document is updated and the version number is incremented. +NOTE: Since `internal` versioning does not support the value 0 as a valid +version number, documents with version equal to zero cannot be updated using +`_update_by_query` and will fail the request. + All update and query failures cause the `_update_by_query` to abort and are returned in the `failures` of the response. The updates that have been performed still stick. In other words, the process is not rolled back, only @@ -104,7 +108,8 @@ will increment the `likes` field on all of kimchy's tweets: POST twitter/_update_by_query { "script": { - "inline": "ctx._source.likes++" + "inline": "ctx._source.likes++", + "lang": "painless" }, "query": { "term": { diff --git a/docs/reference/docs/update.asciidoc b/docs/reference/docs/update.asciidoc index 85e5bf2f67f..2edb0a71a3f 100644 --- a/docs/reference/docs/update.asciidoc +++ b/docs/reference/docs/update.asciidoc @@ -32,7 +32,8 @@ Now, we can execute a script that would increment the counter: -------------------------------------------------- curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ "script" : { - "inline": "ctx._source.counter += count", + "inline": "ctx._source.counter += params.count", + "lang": "painless", "params" : { "count" : 4 } @@ -47,7 +48,8 @@ will still add it, since its a list): -------------------------------------------------- curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ "script" : { - "inline": "ctx._source.tags += tag", + "inline": "ctx._source.tags += params.tag", + "lang": "painless", "params" : { "tag" : "blue" } @@ -85,7 +87,8 @@ the doc if the `tags` field contain `blue`, otherwise it does nothing -------------------------------------------------- curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ "script" : { - "inline": "ctx._source.tags.contains(tag) ? ctx.op = \"delete\" : ctx.op = \"none\"", + "inline": "ctx._source.tags.contains(params.tag) ? ctx.op = \"delete\" : ctx.op = \"none\"", + "lang": "painless", "params" : { "tag" : "blue" } @@ -144,7 +147,8 @@ will be inserted as a new document.
If the document does exist, then the -------------------------------------------------- curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{ "script" : { - "inline": "ctx._source.counter += count", + "inline": "ctx._source.counter += params.count", + "lang": "painless", "params" : { "count" : 4 } diff --git a/docs/reference/how-to.asciidoc b/docs/reference/how-to.asciidoc new file mode 100644 index 00000000000..ee954553617 --- /dev/null +++ b/docs/reference/how-to.asciidoc @@ -0,0 +1,22 @@ +[[how-to]] += How To + +[partintro] +-- +Elasticsearch ships with defaults which are intended to give a good out of +the box experience. Full text search, highlighting, aggregations, and indexing +should all just work without the user having to change anything. + +Once you better understand how you want to use Elasticsearch, however, +there are a number of optimizations you can make to improve performance +for your use case. + +This section provides guidance about which changes should and shouldn't be +made. +-- + +include::how-to/indexing-speed.asciidoc[] + +include::how-to/search-speed.asciidoc[] + +include::how-to/disk-usage.asciidoc[] diff --git a/docs/reference/how-to/disk-usage.asciidoc b/docs/reference/how-to/disk-usage.asciidoc new file mode 100644 index 00000000000..6465690ec96 --- /dev/null +++ b/docs/reference/how-to/disk-usage.asciidoc @@ -0,0 +1,159 @@ +[[tune-for-disk-usage]] +== Tune for disk usage + +[float] +=== Disable the features you do not need + +By default, Elasticsearch indexes and adds doc values to most fields so that they +can be searched and aggregated out of the box. For instance, if you have a numeric +field called `foo` that you need to run histograms on but that you never need to +filter on, you can safely disable indexing on this field in your +<>: + +[source,js] +-------------------------------------------------- +PUT index +{ + "mappings": { + "type": { + "properties": { + "foo": { + "type": "integer", + "index": false + } + } + } + } +} +-------------------------------------------------- +// CONSOLE + +<> fields store normalization factors in the index in order to be +able to score documents. If you only need matching capabilities on a `text` +field but do not care about the produced scores, you can configure Elasticsearch +to not write norms to the index: + +[source,js] +-------------------------------------------------- +PUT index +{ + "mappings": { + "type": { + "properties": { + "foo": { + "type": "text", + "norms": false + } + } + } + } +} +-------------------------------------------------- +// CONSOLE + +<> fields also store frequencies and positions in the index by +default. Frequencies are used to compute scores and positions are used to run +phrase queries. If you do not need to run phrase queries, you can tell +Elasticsearch to not index positions: + +[source,js] +-------------------------------------------------- +PUT index +{ + "mappings": { + "type": { + "properties": { + "foo": { + "type": "text", + "index_options": "freqs" + } + } + } + } +} +-------------------------------------------------- +// CONSOLE + +Furthermore, if you do not care about scoring either, you can configure +Elasticsearch to just index matching documents for every term. You will +still be able to search on this field, but phrase queries will raise errors +and scoring will assume that terms appear only once in every document.
+ +[source,js] +-------------------------------------------------- +PUT index +{ + "mappings": { + "type": { + "properties": { + "foo": { + "type": "text", + "norms": false, + "index_options": "freqs" + } + } + } + } +} +-------------------------------------------------- +// CONSOLE + +[float] +=== Don't use default dynamic string mappings + +The default <> will index string fields +both as <> and <>. This is wasteful if you only +need one of them. Typically an `id` field will only need to be indexed as a +`keyword` while a `body` field will only need to be indexed as a `text` field. + +This can be disabled by either configuring explicit mappings on string fields +or setting up dynamic templates that will map string fields as either `text` +or `keyword`. + +For instance, here is a template that can be used in order to only map string +fields as `keyword`: + +[source,js] +-------------------------------------------------- +PUT index +{ + "mappings": { + "type": { + "dynamic_templates": [ + { + "strings": { + "match_mapping_type": "string", + "mapping": { + "type": "keyword" + } + } + } + ] + } + } +} +-------------------------------------------------- +// CONSOLE + +[float] +=== Disable `_all` + +The <> field indexes the value of all fields of a +document and can use significant space. If you never need to search against all +fields at the same time, it can be disabled. + +[float] +=== Use `best_compression` + +The `_source` and stored fields can easily take a non-negligible amount of disk +space. They can be compressed more aggressively by using the `best_compression` +<>. + +[float] +=== Use the smallest numeric type that is sufficient + +When storing <>, using `float` over `double`, or `half_float` +over `float` can help save storage. This is also true for integer types, but to a lesser extent, +since Elasticsearch will more easily compress them based on the number of bits +that they actually need. + diff --git a/docs/reference/how-to/indexing-speed.asciidoc b/docs/reference/how-to/indexing-speed.asciidoc new file mode 100644 index 00000000000..bb5a367a04c --- /dev/null +++ b/docs/reference/how-to/indexing-speed.asciidoc @@ -0,0 +1,106 @@ +[[tune-for-indexing-speed]] +== Tune for indexing speed + +[float] +=== Use bulk requests + +Bulk requests will yield much better performance than single-document index +requests. In order to know the optimal size of a bulk request, you should run +a benchmark on a single node with a single shard. First try to index 100 +documents at once, then 200, then 400, etc. doubling the number of documents +in a bulk request in every benchmark run. When the indexing speed starts to +plateau, you know you have reached the optimal size of a bulk request for your +data. In case of a tie, it is better to err in the direction of too few rather +than too many documents. Beware that too large bulk requests might put the +cluster under memory pressure when many of them are sent concurrently, so +it is advisable to avoid going beyond a couple of tens of megabytes per request +even if larger requests seem to perform better. + +[float] +=== Use multiple workers/threads to send data to Elasticsearch + +A single thread sending bulk requests is unlikely to be able to max out the +indexing capacity of an Elasticsearch cluster. In order to use all resources +of the cluster, you should send data from multiple threads or processes. In +addition to making better use of the resources of the cluster, this should +help reduce the cost of each fsync.
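+ +Whatever bulk size and worker count your testing settles on, the request shape stays the same. As a rough sketch only (index, type, and field names here are hypothetical), a bulk body packs several operations into newline-delimited JSON: + +[source,js] +-------------------------------------------------- +POST _bulk +{ "index" : { "_index" : "test", "_type" : "type1", "_id" : "1" } } +{ "field1" : "value1" } +{ "index" : { "_index" : "test", "_type" : "type1", "_id" : "2" } } +{ "field1" : "value2" } +--------------------------------------------------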
+ +Make sure to watch for `TOO_MANY_REQUESTS (429)` response codes +(`EsRejectedExecutionException` with the Java client), which is the way that +Elasticsearch tells you that it cannot keep up with the current indexing rate. +When it happens, you should pause indexing a bit before trying again, ideally +with randomized exponential backoff. + +Similarly to sizing bulk requests, only testing can tell what the optimal +number of workers is. This can be tested by progressively increasing the +number of workers until either I/O or CPU is saturated on the cluster. + +[float] +=== Increase the refresh interval + +The default <> is `1s`, which +forces Elasticsearch to create a new segment every second. +Increasing this value (say, to `30s`) will allow larger segments to flush and +decrease future merge pressure. + +[float] +=== Disable refresh and replicas for initial loads + +If you need to load a large amount of data at once, you should disable refresh +by setting `index.refresh_interval` to `-1` and set `index.number_of_replicas` +to `0`. This will temporarily put your index at risk since the loss of any shard +will cause data loss, but at the same time indexing will be faster since +documents will be indexed only once. Once the initial loading is finished, you +can set `index.refresh_interval` and `index.number_of_replicas` back to their +original values. + +[float] +=== Disable swapping + +You should make sure that the operating system is not swapping out the Java +process by <>. + +[float] +=== Give memory to the filesystem cache + +The filesystem cache will be used in order to buffer I/O operations. You should +make sure to give at least half the memory of the machine running Elasticsearch +to the filesystem cache. + +[float] +=== Use faster hardware + +If indexing is I/O bound, you should investigate giving more memory to the +filesystem cache (see above) or buying faster drives. In particular, SSD drives +are known to perform better than spinning disks. Always use local storage; +remote filesystems such as `NFS` or `SMB` should be avoided. Also beware of +virtualized storage such as Amazon's `Elastic Block Storage`. Virtualized +storage works very well with Elasticsearch, and it is appealing since it is so +fast and simple to set up, but it is also unfortunately inherently slower on an +ongoing basis when compared to dedicated local storage. If you put an index on +`EBS`, be sure to use provisioned IOPS; otherwise, operations could be quickly +throttled. + +Stripe your index across multiple SSDs by configuring a RAID 0 array. Remember +that it will increase the risk of failure since the failure of any one SSD +destroys the index. However, this is typically the right tradeoff to make: +optimize single shards for maximum performance, and then add replicas across +different nodes so there's redundancy for any node failures. You can also use +<> to back up the index for further +insurance. + +[float] +=== Indexing buffer size + +If your node is doing only heavy indexing, be sure +<> is large enough to give +at most 512 MB indexing buffer per shard doing heavy indexing (beyond that +indexing performance does not typically improve). Elasticsearch takes that +setting (a percentage of the Java heap or an absolute byte-size), and +uses it as a shared buffer across all active shards. Very active shards will +naturally use this buffer more than shards that are performing lightweight +indexing.
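+ +As a sketch only, the indexing buffer is sized through the `indices.memory.index_buffer_size` setting in `elasticsearch.yml`; the value below is purely illustrative: + +[source,yaml] +-------------------------------------------------- +# illustrative value; a percentage of the heap shared by all active shards +indices.memory.index_buffer_size: 20% +--------------------------------------------------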
+ +The default is `10%`, which is often plenty: for example, if you give the JVM +10GB of memory, it will give 1GB to the index buffer, which is enough to host +two shards that are heavily indexing. diff --git a/docs/reference/how-to/search-speed.asciidoc b/docs/reference/how-to/search-speed.asciidoc new file mode 100644 index 00000000000..67848c9edca --- /dev/null +++ b/docs/reference/how-to/search-speed.asciidoc @@ -0,0 +1,194 @@ +[[tune-for-search-speed]] +== Tune for search speed + +[float] +=== Give memory to the filesystem cache + +Elasticsearch heavily relies on the filesystem cache in order to make search +fast. In general, you should make sure that at least half the available memory +goes to the filesystem cache so that Elasticsearch can keep hot regions of the +index in physical memory. + +[float] +=== Use faster hardware + +If your search is I/O bound, you should investigate giving more memory to the +filesystem cache (see above) or buying faster drives. In particular, SSD drives +are known to perform better than spinning disks. Always use local storage; +remote filesystems such as `NFS` or `SMB` should be avoided. Also beware of +virtualized storage such as Amazon's `Elastic Block Storage`. Virtualized +storage works very well with Elasticsearch, and it is appealing since it is so +fast and simple to set up, but it is also unfortunately inherently slower on an +ongoing basis when compared to dedicated local storage. If you put an index on +`EBS`, be sure to use provisioned IOPS; otherwise, operations could be quickly +throttled. + +If your search is CPU-bound, you should investigate buying faster CPUs. + +[float] +=== Document modeling + +Documents should be modeled so that search-time operations are as cheap as possible. + +In particular, joins should be avoided. <> can make queries +several times slower, and <> relations can make +queries hundreds of times slower. So if the same questions can be answered without +joins by denormalizing documents, significant speedups can be expected. + +[float] +=== Pre-index data + +You should leverage patterns in your queries to optimize the way data is indexed. +For instance, if all your documents have a `price` field and most queries run +<> aggregations on a fixed +list of ranges, you could make this aggregation faster by pre-indexing the ranges +into the index and using a <> +aggregation.
+ +For instance, if documents look like: + +[source,js] +-------------------------------------------------- +PUT index/type/1 +{ + "designation": "spoon", + "price": 13 +} +-------------------------------------------------- +// CONSOLE + +and search requests look like: + +[source,js] +-------------------------------------------------- +GET index/_search +{ + "aggs": { + "price_ranges": { + "range": { + "field": "price", + "ranges": [ + { "to": 10 }, + { "from": 10, "to": 100 }, + { "from": 100 } + ] + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +Then documents could be enriched with a `price_range` field at index time, which +should be mapped as a <>: + +[source,js] +-------------------------------------------------- +PUT index +{ + "mappings": { + "type": { + "properties": { + "price_range": { + "type": "keyword" + } + } + } + } +} + +PUT index/type/1 +{ + "designation": "spoon", + "price": 13, + "price_range": "10-100" +} +-------------------------------------------------- +// CONSOLE + +And then search requests could aggregate this new field rather than running a +`range` aggregation on the `price` field. + +[source,js] +-------------------------------------------------- +GET index/_search +{ + "aggs": { + "price_ranges": { + "terms": { + "field": "price_range" + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +[float] +=== Mappings + +The fact that some data is numeric does not mean it should always be mapped as a +<>. Typically, fields storing identifiers such as an `ISBN` +or any number identifying a record from another database might benefit from +being mapped as <> rather than `integer` or `long`. + +[float] +=== Avoid scripts + +In general, scripts should be avoided. If they are absolutely needed, you +should prefer the `painless` and `expressions` engines. + +[float] +=== Force-merge read-only indices + +Indices that are read-only would benefit from being +<>. This is typically the +case with time-based indices: only the index for the current time frame is +getting new documents while older indices are read-only. + +IMPORTANT: Don't force-merge indices that are still being written to -- leave +merging to the background merge process. + +[float] +=== Warm up global ordinals + +Global ordinals are a data structure used to run +<> aggregations on +<> fields. They are loaded lazily into memory because +Elasticsearch does not know which fields will be used in `terms` aggregations +and which fields won't. You can tell Elasticsearch to load global ordinals +eagerly at refresh-time by configuring mappings as described below: + +[source,js] +-------------------------------------------------- +PUT index +{ + "mappings": { + "type": { + "properties": { + "foo": { + "type": "keyword", + "eager_global_ordinals": true + } + } + } + } +} +-------------------------------------------------- +// CONSOLE + +[float] +=== Warm up the filesystem cache + +If the machine running Elasticsearch is restarted, the filesystem cache will be +empty, so it will take some time before the operating system loads hot regions +of the index into memory and search operations become fast again. You can explicitly +tell the operating system which files should be loaded into memory eagerly +depending on the file extension using the <> +setting.
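+ +As a sketch, assuming the `index.store.preload` setting referenced above, extensions could be preloaded at index creation time; the extensions listed here are purely illustrative: + +[source,js] +-------------------------------------------------- +PUT index +{ + "settings": { + "index.store.preload": ["nvd", "dvd"] + } +} +--------------------------------------------------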
+ +WARNING: Loading data into the filesystem cache eagerly on too many indices or +too many files will make search _slower_ if the filesystem cache is not large +enough to hold all the data. Use with caution. diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index 132d763f714..636475dfb47 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -1,7 +1,7 @@ [[elasticsearch-reference]] = Elasticsearch Reference -:version: 5.0.0-alpha3 +:version: 5.0.0-alpha4 :major-version: 5.x :branch: master :jdk: 1.8.0_73 @@ -43,6 +43,8 @@ include::index-modules.asciidoc[] include::ingest.asciidoc[] +include::how-to.asciidoc[] + include::testing.asciidoc[] include::glossary.asciidoc[] diff --git a/docs/reference/indices/shadow-replicas.asciidoc b/docs/reference/indices/shadow-replicas.asciidoc index 60360c147b5..3a0b23852b0 100644 --- a/docs/reference/indices/shadow-replicas.asciidoc +++ b/docs/reference/indices/shadow-replicas.asciidoc @@ -10,12 +10,12 @@ index. In order to fully utilize the `index.data_path` and `index.shadow_replicas` settings, you need to allow Elasticsearch to use the same data directory for -multiple instances by setting `node.add_id_to_custom_path` to false in +multiple instances by setting `node.add_lock_id_to_custom_path` to false in elasticsearch.yml: [source,yaml] -------------------------------------------------- -node.add_id_to_custom_path: false +node.add_lock_id_to_custom_path: false -------------------------------------------------- You will also need to indicate to the security manager where the custom indices @@ -114,7 +114,7 @@ settings API: These are non-dynamic settings that need to be configured in `elasticsearch.yml` -`node.add_id_to_custom_path`:: +`node.add_lock_id_to_custom_path`:: Boolean setting indicating whether Elasticsearch should append the node's ordinal to the custom data path. For example, if this is enabled and a path of "/tmp/foo" is used, the first locally-running node will use "/tmp/foo/0", diff --git a/docs/reference/indices/shrink-index.asciidoc b/docs/reference/indices/shrink-index.asciidoc index 39c5134e23c..2975ef903f3 100644 --- a/docs/reference/indices/shrink-index.asciidoc +++ b/docs/reference/indices/shrink-index.asciidoc @@ -76,8 +76,8 @@ Indices can only be shrunk if they satisfy the following requirements: * The index must have more primary shards than the target index. * The number of primary shards in the target index must be a factor of the - number of primary shards in the source index. must have more primary shards - than the target index. + number of primary shards in the source index. The source index must have + more primary shards than the target index. 
* The index must not contain more than `2,147,483,519` documents in total across all shards that will be shrunk into a single shard on the target index diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index b03ed641de7..ec4f9c30e66 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -46,7 +46,6 @@ PUT _ingest/pipeline/my-pipeline-id "value": "bar" } } - // other processors ] } -------------------------------------------------- @@ -83,7 +82,6 @@ Example response: "value": "bar" } } - // other processors ] } } ] diff --git a/docs/reference/mapping/fields/field-names-field.asciidoc b/docs/reference/mapping/fields/field-names-field.asciidoc index a56a9081e88..cf8c6398d2e 100644 --- a/docs/reference/mapping/fields/field-names-field.asciidoc +++ b/docs/reference/mapping/fields/field-names-field.asciidoc @@ -31,7 +31,10 @@ GET my_index/_search }, "script_fields": { "Field names": { - "script": "doc['_field_names']" <2> + "script": { + "lang": "painless", + "inline": "doc['_field_names']" <2> + } } } } @@ -40,4 +43,4 @@ GET my_index/_search // CONSOLE <1> Querying on the `_field_names` field (also see the <> query) -<2> Accessing the `_field_names` field in scripts (inline scripts must be <> for this example to work) +<2> Accessing the `_field_names` field in scripts diff --git a/docs/reference/mapping/fields/id-field.asciidoc b/docs/reference/mapping/fields/id-field.asciidoc index a852f4f4770..c640b561571 100644 --- a/docs/reference/mapping/fields/id-field.asciidoc +++ b/docs/reference/mapping/fields/id-field.asciidoc @@ -7,8 +7,8 @@ indexed as its value can be derived automatically from the <> field. The value of the `_id` field is accessible in certain queries (`term`, -`terms`, `match`, `query_string`, `simple_query_string`) and scripts, but -_not_ in aggregations or when sorting, where the <> +`terms`, `match`, `query_string`, `simple_query_string`), but +_not_ in aggregations, scripts or when sorting, where the <> field should be used instead: [source,js] @@ -30,16 +30,9 @@ GET my_index/_search "terms": { "_id": [ "1", "2" ] <1> } - }, - "script_fields": { - "UID": { - "script": "doc['_id']" <2> - } } } -------------------------- // CONSOLE <1> Querying on the `_id` field (also see the <>) -<2> Accessing the `_id` field in scripts (inline scripts must be <> for this example to work) - diff --git a/docs/reference/mapping/fields/index-field.asciidoc b/docs/reference/mapping/fields/index-field.asciidoc index c31b6f1d0f7..599fedba62c 100644 --- a/docs/reference/mapping/fields/index-field.asciidoc +++ b/docs/reference/mapping/fields/index-field.asciidoc @@ -50,7 +50,10 @@ GET index_1,index_2/_search ], "script_fields": { "index_name": { - "script": "doc['_index']" <4> + "script": { + "lang": "painless", + "inline": "doc['_index']" <4> + } } } } @@ -60,4 +63,4 @@ GET index_1,index_2/_search <1> Querying on the `_index` field <2> Aggregating on the `_index` field <3> Sorting on the `_index` field -<4> Accessing the `_index` field in scripts (inline scripts must be <> for this example to work) +<4> Accessing the `_index` field in scripts diff --git a/docs/reference/mapping/fields/parent-field.asciidoc b/docs/reference/mapping/fields/parent-field.asciidoc index 9dd8646172d..30b2e1a2086 100644 --- a/docs/reference/mapping/fields/parent-field.asciidoc +++ b/docs/reference/mapping/fields/parent-field.asciidoc @@ -81,7 +81,10 @@ GET my_index/_search }, "script_fields": { "parent": { - "script": 
"doc['_parent']" <3> + "script": { + "lang": "painless", + "inline": "doc['_parent']" <3> + } } } } @@ -91,7 +94,7 @@ GET my_index/_search <1> Querying on the `_parent` field (also see the <> and the <>) <2> Aggregating on the `_parent` field (also see the <> aggregation) -<3> Accessing the `_parent` field in scripts (inline scripts must be <> for this example to work) +<3> Accessing the `_parent` field in scripts ==== Parent-child restrictions diff --git a/docs/reference/mapping/fields/routing-field.asciidoc b/docs/reference/mapping/fields/routing-field.asciidoc index 49f269511dc..c8a92de1368 100644 --- a/docs/reference/mapping/fields/routing-field.asciidoc +++ b/docs/reference/mapping/fields/routing-field.asciidoc @@ -42,7 +42,10 @@ GET my_index/_search }, "script_fields": { "Routing value": { - "script": "doc['_routing']" <2> + "script": { + "lang": "painless", + "inline": "doc['_routing']" <2> + } } } } @@ -50,7 +53,7 @@ GET my_index/_search // CONSOLE <1> Querying on the `_routing` field (also see the <>) -<2> Accessing the `_routing` field in scripts (inline scripts must be <> for this example to work) +<2> Accessing the `_routing` field in scripts ==== Searching with custom routing diff --git a/docs/reference/mapping/fields/type-field.asciidoc b/docs/reference/mapping/fields/type-field.asciidoc index 703ee9930d2..fecc0143e94 100644 --- a/docs/reference/mapping/fields/type-field.asciidoc +++ b/docs/reference/mapping/fields/type-field.asciidoc @@ -45,7 +45,10 @@ GET my_index/type_*/_search ], "script_fields": { "type": { - "script": "doc['_type']" <4> + "script": { + "lang": "painless", + "inline": "doc['_type']" <4> + } } } } @@ -56,5 +59,5 @@ GET my_index/type_*/_search <1> Querying on the `_type` field <2> Aggregating on the `_type` field <3> Sorting on the `_type` field -<4> Accessing the `_type` field in scripts (inline scripts must be <> for this example to work) +<4> Accessing the `_type` field in scripts diff --git a/docs/reference/mapping/fields/uid-field.asciidoc b/docs/reference/mapping/fields/uid-field.asciidoc index 43d5751d426..82095160646 100644 --- a/docs/reference/mapping/fields/uid-field.asciidoc +++ b/docs/reference/mapping/fields/uid-field.asciidoc @@ -45,7 +45,10 @@ GET my_index/_search ], "script_fields": { "UID": { - "script": "doc['_uid']" <4> + "script": { + "lang": "painless", + "inline": "doc['_uid']" <4> + } } } } @@ -55,5 +58,4 @@ GET my_index/_search <1> Querying on the `_uid` field (also see the <>) <2> Aggregating on the `_uid` field <3> Sorting on the `_uid` field -<4> Accessing the `_uid` field in scripts (inline scripts must be <> for this example to work) - +<4> Accessing the `_uid` field in scripts diff --git a/docs/reference/mapping/params/fielddata.asciidoc b/docs/reference/mapping/params/fielddata.asciidoc index 92151c56d49..2e6b63698c7 100644 --- a/docs/reference/mapping/params/fielddata.asciidoc +++ b/docs/reference/mapping/params/fielddata.asciidoc @@ -81,7 +81,7 @@ can move the loading time from the first search request, to the refresh itself. Fielddata filtering can be used to reduce the number of terms loaded into memory, and thus reduce memory usage. Terms can be filtered by _frequency_: -The frequency filter allows you to only load terms whose term frequency falls +The frequency filter allows you to only load terms whose document frequency falls between a `min` and `max` value, which can be expressed an absolute number (when the number is bigger than 1.0) or as a percentage (eg `0.01` is `1%` and `1.0` is `100%`). 
Frequency is calculated diff --git a/docs/reference/mapping/types/boolean.asciidoc b/docs/reference/mapping/types/boolean.asciidoc index 914646a7220..a5f559facc2 100644 --- a/docs/reference/mapping/types/boolean.asciidoc +++ b/docs/reference/mapping/types/boolean.asciidoc @@ -76,13 +76,15 @@ GET my_index/_search }, "script_fields": { "is_published": { - "script": "doc['is_published'].value" <1> + "script": { + "lang": "painless", + "inline": "doc['is_published'].value" + } } } } -------------------------------------------------- // CONSOLE -<1> Inline scripts must be <> for this example to work. [[boolean-params]] ==== Parameters for `boolean` fields diff --git a/docs/reference/migration/migrate_5_0/aggregations.asciidoc b/docs/reference/migration/migrate_5_0/aggregations.asciidoc index d9227e91385..287da1efb99 100644 --- a/docs/reference/migration/migrate_5_0/aggregations.asciidoc +++ b/docs/reference/migration/migrate_5_0/aggregations.asciidoc @@ -26,3 +26,8 @@ for `from` and `to` anymore. `size: 0` is no longer valid for the terms, significant terms and geohash grid aggregations. Instead a size should be explicitly specified with a number greater than zero. + +==== Fractional time values + +Fractional time values (e.g., 0.5s) are no longer supported. For example, this means that when setting +date histogram intervals, "1.5h" will be rejected and should instead be specified as "90m". diff --git a/docs/reference/migration/migrate_5_0/fs.asciidoc b/docs/reference/migration/migrate_5_0/fs.asciidoc index 859f3092823..42c8b4ddcea 100644 --- a/docs/reference/migration/migrate_5_0/fs.asciidoc +++ b/docs/reference/migration/migrate_5_0/fs.asciidoc @@ -23,3 +23,9 @@ behavior will be removed. If you are using a multi-cluster setup with both instances of Elasticsearch pointing to the same data path, you will need to add the cluster name to the data path so that different clusters do not overwrite data. + +==== Local files + +Prior to 5.0, nodes that were marked with both `node.data: false` and `node.master: false` (or the now removed `node.client: true`) +didn't write any files or folders to disk. 5.x added persistent node IDs, requiring nodes to store that information. As such, all +node types will write a small state file to their data folders. \ No newline at end of file diff --git a/docs/reference/migration/migrate_5_0/plugins.asciidoc b/docs/reference/migration/migrate_5_0/plugins.asciidoc index a1c0dad9ca1..79583c6b925 100644 --- a/docs/reference/migration/migrate_5_0/plugins.asciidoc +++ b/docs/reference/migration/migrate_5_0/plugins.asciidoc @@ -63,7 +63,7 @@ Proxy settings for both plugins have been renamed: Cloud Azure plugin has been split in three plugins: -* {plugins}/discovery-azure.html[Discovery Azure plugin] +* {plugins}/discovery-azure-classic.html[Discovery Azure plugin] * {plugins}/repository-azure.html[Repository Azure plugin] * {plugins}/store-smb.html[Store SMB plugin] @@ -122,3 +122,29 @@ been removed. Plugins that register custom scripts should implement `ScriptPlugin` and remove their `onModule(ScriptModule)` implementation. + +==== AnalysisPlugin + +Plugins that register custom analysis components should implement +`AnalysisPlugin` and remove their `onModule(AnalysisModule)` implementation. + +==== MapperPlugin + +Plugins that register custom mappers should implement +`MapperPlugin` and remove their `onModule(IndicesModule)` implementation.
+ +==== ActionPlugin + +Plugins that register custom actions should implement `ActionPlugin` and +remove their `onModule(ActionModule)` implementation. + +Plugins that register custom `RestHandler`s should implement `ActionPlugin` and +remove their `onModule(NetworkModule)` implementation. + +==== Mapper-Size plugin + +The metadata field `_size` is not accessible in aggregations, scripts, or when +sorting for indices created in 2.x, even if the index has been upgraded using the <> API. +If these features are needed in your application, you must reindex the data with Elasticsearch 5.x. +The easiest way to reindex old indices is to use the `reindex` API, or the reindex UI provided by +the <>. diff --git a/docs/reference/migration/migrate_5_0/settings.asciidoc b/docs/reference/migration/migrate_5_0/settings.asciidoc index 4863588818f..7bfa9dc875c 100644 --- a/docs/reference/migration/migrate_5_0/settings.asciidoc +++ b/docs/reference/migration/migrate_5_0/settings.asciidoc @@ -26,6 +26,8 @@ should be used instead. The `name` setting has been removed and is replaced by `node.name`. Usage of `-Dname=some_node_name` is not supported anymore. +The `node.add_id_to_custom_path` setting was renamed to `node.add_lock_id_to_custom_path`. + ==== Node attribute settings Node level attributes used for allocation filtering, forced awareness or other node identification / grouping @@ -250,11 +252,11 @@ Due to the fact that indexed script has been replaced by stored scripts the following settings have been replaced to: * `script.indexed` has been replaced by `script.stored` -* `script.engine.*.indexed.aggs` has been replaced by `script.engine.*.stored.aggs` (where `*` represents the script language, like `groovy`, `mustache`, `plainless` etc.) -* `script.engine.*.indexed.mapping` has been replaced by `script.engine.*.stored.mapping` (where `*` represents the script language, like `groovy`, `mustache`, `plainless` etc.) -* `script.engine.*.indexed.search` has been replaced by `script.engine.*.stored.search` (where `*` represents the script language, like `groovy`, `mustache`, `plainless` etc.) -* `script.engine.*.indexed.update` has been replaced by `script.engine.*.stored.update` (where `*` represents the script language, like `groovy`, `mustache`, `plainless` etc.) -* `script.engine.*.indexed.plugin` has been replaced by `script.engine.*.stored.plugin` (where `*` represents the script language, like `groovy`, `mustache`, `plainless` etc.) +* `script.engine.*.indexed.aggs` has been replaced by `script.engine.*.stored.aggs` (where `*` represents the script language, like `groovy`, `mustache`, `painless` etc.) +* `script.engine.*.indexed.mapping` has been replaced by `script.engine.*.stored.mapping` (where `*` represents the script language, like `groovy`, `mustache`, `painless` etc.) +* `script.engine.*.indexed.search` has been replaced by `script.engine.*.stored.search` (where `*` represents the script language, like `groovy`, `mustache`, `painless` etc.) +* `script.engine.*.indexed.update` has been replaced by `script.engine.*.stored.update` (where `*` represents the script language, like `groovy`, `mustache`, `painless` etc.) +* `script.engine.*.indexed.plugin` has been replaced by `script.engine.*.stored.plugin` (where `*` represents the script language, like `groovy`, `mustache`, `painless` etc.) ==== Script mode settings @@ -286,3 +288,10 @@ The setting `bootstrap.mlockall` has been renamed to The default setting `include_global_state` for restoring snapshots has been changed from `true` to `false`.
It has not been changed for taking snapshots and still defaults to `true` in that case. + +==== Time value parsing + +The unit 'w' representing weeks is no longer supported. + +Fractional time values (e.g., 0.5s) are no longer supported. For example, this means that when setting +timeouts, "0.5s" will be rejected and should instead be specified as "500ms". diff --git a/docs/reference/modules/discovery/azure.asciidoc b/docs/reference/modules/discovery/azure.asciidoc index 87d072564b3..1343819b02a 100644 --- a/docs/reference/modules/discovery/azure.asciidoc +++ b/docs/reference/modules/discovery/azure.asciidoc @@ -1,5 +1,5 @@ -[[modules-discovery-azure]] -=== Azure Discovery -Azure discovery allows to use the Azure APIs to perform automatic discovery (similar to multicast). -It is available as a plugin. See {plugins}/discovery-azure.html[discovery-azure] for more information. +[[modules-discovery-azure-classic]] +=== Azure Classic Discovery +Azure classic discovery allows to use the Azure Classic APIs to perform automatic discovery (similar to multicast). +It is available as a plugin. See {plugins}/discovery-azure-classic.html[discovery-azure-classic] for more information. diff --git a/docs/reference/modules/scripting.asciidoc b/docs/reference/modules/scripting.asciidoc index feb8113a6da..76065210569 100644 --- a/docs/reference/modules/scripting.asciidoc +++ b/docs/reference/modules/scripting.asciidoc @@ -96,6 +96,8 @@ include::scripting/groovy.asciidoc[] include::scripting/painless.asciidoc[] +include::scripting/painless-syntax.asciidoc[] + include::scripting/expression.asciidoc[] include::scripting/native.asciidoc[] diff --git a/docs/reference/modules/scripting/painless-syntax.asciidoc b/docs/reference/modules/scripting/painless-syntax.asciidoc new file mode 100644 index 00000000000..88c2cdb11a9 --- /dev/null +++ b/docs/reference/modules/scripting/painless-syntax.asciidoc @@ -0,0 +1,173 @@ +[[modules-scripting-painless-syntax]] +=== Painless Syntax + +experimental[The Painless scripting language is new and is still marked as experimental. The syntax or API may be changed in the future in non-backwards compatible ways if required.] + +[float] +[[painless-types]] +=== Variable types + +Painless supports all of https://docs.oracle.com/javase/tutorial/java/nutsandbolts/variables.html[Java's types], +including array types, but adds some additional built-in types. + +[float] +[[painless-def]] +==== Def + +The dynamic type `def` serves as a placeholder for any other type. It adopts the behavior +of whatever runtime type it represents. + +[float] +[[painless-strings]] +==== String + +String constants can be declared with single quotes, to avoid escaping horrors with JSON: + +[source,js] +--------------------------------------------------------- +def mystring = 'foo'; +--------------------------------------------------------- + +[float] +[[painless-lists]] +==== List + +Lists can be created explicitly (e.g. `new ArrayList()`) or initialized similarly to Groovy: + +[source,js] +--------------------------------------------------------- +def list = [1,2,3]; +--------------------------------------------------------- + +Lists can also be accessed similarly to arrays: they support subscript and `.length`: + +[source,js] +--------------------------------------------------------- +def list = [1,2,3]; +return list[0] +--------------------------------------------------------- + +[float] +[[painless-maps]] +==== Map + +Maps can be created explicitly (e.g.
`new HashMap()`) or initialized similarly to Groovy: + +[source,js] +--------------------------------------------------------- +def person = ['name': 'Joe', 'age': 63]; +--------------------------------------------------------- + +Map keys can also be accessed as properties. + +[source,js] +--------------------------------------------------------- +def person = ['name': 'Joe', 'age': 63]; +person.retired = true; +return person.name +--------------------------------------------------------- + +Map keys can also be accessed via subscript (for keys containing special characters): + +[source,js] +--------------------------------------------------------- +return map['something-absurd!'] +--------------------------------------------------------- + +[float] +[[painless-pattern]] +==== Pattern + +Regular expression constants are directly supported: + +[source,js] +--------------------------------------------------------- +Pattern p = /[aeiou]/ +--------------------------------------------------------- + +Patterns can only be created via this mechanism. This ensures fast performance: regular expressions +are always constants and are compiled a single time. + +[float] +[[modules-scripting-painless-regex-flags]] +==== Pattern flags + +You can define flags on patterns in Painless by adding characters after the +trailing `/` like `/foo/i` or `/foo \w #comment/iUx`. Painless exposes all the +flags from +https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html[Java's Pattern class] +using these characters: + +[cols="<,<,<",options="header",] +|======================================================================= +| Character | Java Constant | Example +|`c` | CANON_EQ | `'å' ==~ /å/c` (open in hex editor to see) +|`i` | CASE_INSENSITIVE | `'A' ==~ /a/i` +|`l` | LITERAL | `'[a]' ==~ /[a]/l` +|`m` | MULTILINE | `'a\nb\nc' =~ /^b$/m` +|`s` | DOTALL (aka single line) | `'a\nb\nc' =~ /.b./s` +|`U` | UNICODE_CHARACTER_CLASS | `'Ɛ' ==~ /\\w/U` +|`u` | UNICODE_CASE | `'Ɛ' ==~ /ɛ/iu` +|`x` | COMMENTS (aka extended) | `'a' ==~ /a #comment/x` +|======================================================================= + +[float] +[[painless-operators]] +=== Operators + +All of Java's https://docs.oracle.com/javase/tutorial/java/nutsandbolts/operators.html[operators] are +supported with the same precedence, promotion, and semantics. + +There are only a few minor differences and add-ons: + +* `==` behaves as Java's for numeric types, but for non-numeric types acts as https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#equals-java.lang.Object-[`Object.equals()`] +* `===` and `!==` support exact reference comparison (e.g. `x === y`) +* `=~` true if a portion of the text matches a pattern (e.g. `x =~ /b/`) +* `==~` true if the entire text matches a pattern (e.g. `x ==~ /[Bb]ob/`) + +[float] +[[painless-control-flow]] +=== Control flow + +Java's https://docs.oracle.com/javase/tutorial/java/nutsandbolts/flow.html[control flow statements] are supported, with the exception +of the `switch` statement. + +In addition to Java's `enhanced for` loop, the `for in` syntax from Groovy can also be used: + +[source,js] +--------------------------------------------------------- +for (item : list) { + ... +} +--------------------------------------------------------- + +[float] +[[painless-functions]] +=== Functions + +Functions can be declared at the beginning of the script, for example: + +[source,js] +--------------------------------------------------------- +boolean isNegative(def x) { x < 0 } +...
+if (isNegative(someVar)) { + ... +} +--------------------------------------------------------- + +[float] +[[painless-lambda-expressions]] +=== Lambda expressions +Lambda expressions and method references work the same as https://docs.oracle.com/javase/tutorial/java/javaOO/lambdaexpressions.html[Java's]. + +[source,js] +--------------------------------------------------------- +list.removeIf(item -> item == 2); +list.removeIf((int item) -> item == 2); +list.removeIf((int item) -> { item == 2 }); +list.sort((x, y) -> x - y); +list.sort(Integer::compare); +--------------------------------------------------------- + +Method references to functions within the script can be accomplished using `this`, e.g. `list.sort(this::mycompare)`. diff --git a/docs/reference/modules/scripting/painless.asciidoc b/docs/reference/modules/scripting/painless.asciidoc index b4af07035ee..ad36cdd6df4 100644 --- a/docs/reference/modules/scripting/painless.asciidoc +++ b/docs/reference/modules/scripting/painless.asciidoc @@ -8,9 +8,7 @@ by default. It is designed specifically for use with Elasticsearch and can safely be used with `inline` and `stored` scripting, which is enabled by default. -A Painless script is essentially a single function. Painless does not provide support -for defining multiple functions within a script. The Painless syntax is similar to -http://groovy-lang.org/index.html[Groovy]. +The Painless syntax is similar to http://groovy-lang.org/index.html[Groovy]. You can use Painless anywhere a script can be used in Elasticsearch--simply set the `lang` parameter to `painless`. @@ -19,22 +17,15 @@ to `painless`. [float] == Painless Features -* Control flow: `for` loops, `while` loops, `do/while` loops, `if/else` +* Fast performance: https://benchmarks.elastic.co/index.html#search_qps_scripts[several times faster] than the alternatives. -* Fully Typed: all available types/methods described in <> +* Safety: Fine-grained <> with method call/field granularity. -* Arithmetic operators: multiplication `*`, division `/`, addition `+`, subtraction `-`, precedence `( )` +* Optional typing: Variables and parameters can use explicit types or the dynamic `def` type. -* Comparison operators: less than `<`, less than or equal to `<=`, greater than `>`, greater than or equal to `>=`, equal to `==`, and not equal to `!=`, reference equals `===`, reference not equals `!==` - -* Boolean operators: not `!`, and `&&`, or `||` - -* Bitwise operators: shift left `<<`, shift right `>>`, unsigned shift `>>>`, and `&`, or `|`, xor `^`, not `~` - -* Shortcuts for list, map access using the dot `.` operator - -* Native support for regular expressions with `/pattern/`, `=~`, and `==~` +* Syntax: Extends Java's syntax with a subset of Groovy for ease of use. See the <>. +* Optimizations: Designed specifically for Elasticsearch scripting. [[painless-examples]] [float] @@ -74,7 +65,7 @@ PUT hockey/player/_bulk?refresh [float] === Accessing Doc Values from Painless -Document values can be accessed from a `Map` named `doc`. +Document values can be accessed from a `Map` named `doc`. For example, the following script calculates a player's total goals. This example uses a strongly typed `int` and a `for` loop. 
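+ +The script itself is not shown here; as a sketch only, assuming a hypothetical numeric `goals` list on each document, such a script might look like: + +[source,js] +---------------------------------------------------------------- +// 'goals' is a hypothetical per-game field; doc values lists support subscript and .length +int total = 0; +for (int i = 0; i < doc['goals'].length; ++i) { + total += doc['goals'][i]; +} +return total; +----------------------------------------------------------------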
@@ -246,8 +237,8 @@ POST hockey/player/_update_by_query ---------------------------------------------------------------- // CONSOLE -Or you can use the `Pattern.matcher` directory to get a `Matcher` instance and -remove all of the vowels in all of their names: +You can use the `Pattern.matcher` directly to get a `Matcher` instance and +remove all of the vowels in all of their last names: [source,js] ---------------------------------------------------------------- @@ -261,6 +252,59 @@ POST hockey/player/_update_by_query ---------------------------------------------------------------- // CONSOLE +`Matcher.replaceAll` is just a call to Java's `Matcher`'s +http://docs.oracle.com/javase/8/docs/api/java/util/regex/Matcher.html#replaceAll-java.lang.String-[replaceAll] +method so it supports `$1` and `\1` for replacements: + +[source,js] +---------------------------------------------------------------- +POST hockey/player/_update_by_query +{ + "script": { + "lang": "painless", + "inline": "ctx._source.last = /n([aeiou])/.matcher(ctx._source.last).replaceAll('$1')" + } +} +---------------------------------------------------------------- +// CONSOLE + +If you need more control over replacements you can call `replaceAll` on a +`CharSequence` with a `Function` that builds the replacement. +This does not support `$1` or `\1` to access replacements because you already +have a reference to the matcher and can get them with `m.group(1)`. + +IMPORTANT: Calling `Matcher.find` inside of the function that builds the +replacement is rude and will likely break the replacement process. + +This will make all of the vowels in the hockey players' last names upper case: + +[source,js] +---------------------------------------------------------------- +POST hockey/player/_update_by_query +{ + "script": { + "lang": "painless", + "inline": "ctx._source.last = ctx._source.last.replaceAll(/[aeiou]/, m -> m.group().toUpperCase(Locale.ROOT))" + } +} +---------------------------------------------------------------- +// CONSOLE + +Or you can use `CharSequence.replaceFirst` to make the first vowel in their +last names upper case: + +[source,js] +---------------------------------------------------------------- +POST hockey/player/_update_by_query +{ + "script": { + "lang": "painless", + "inline": "ctx._source.last = ctx._source.last.replaceFirst(/[aeiou]/, m -> m.group().toUpperCase(Locale.ROOT))" + } +} +---------------------------------------------------------------- +// CONSOLE + Note: all of the `_update_by_query` examples above could really do with a `query` to limit the data that they pull back. While you *could* use a @@ -268,528 +312,27 @@ Note: all of the `_update_by_query` examples above because script queries aren't able to use the inverted index to limit the documents that they have to check. -We intentionally don't allow scripts to call `Pattern.compile` to get a new -pattern on the fly because building a `Pattern` is (comparatively) slow. -Pattern literals (`/apattern/`) have fancy constant extraction so no matter -where they show up in the painless script they are built only when the script -is first used. It is fairly similar to how `String` literals work in Java. - [float] -[[modules-scripting-painless-regex-flags]] -==== Regular expression flags - -You can define flags on patterns in Painless by adding characters after the -trailing `/` like `/foo/i` or `/foo \w #comment/iUx`.
Painless exposes all the
-flags from
-https://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html[Java's Pattern class]
-using these characters:
-
-[cols="<,<,<",options="header",]
-|=======================================================================
-| Character | Java Constant | Example
-|`c` | CANON_EQ | `'å' ==~ /å/c` (open in hex editor to see)
-|`i` | CASE_INSENSITIVE | `'A' ==~ /a/i`
-|`l` | LITERAL | `'[a]' ==~ /[a]/l`
-|`m` | MULTILINE | `'a\nb\nc' =~ /^b$/m`
-|`s` | DOTALL (aka single line) | `'a\nb\nc' =~ /.b./s`
-|`U` | UNICODE_CHARACTER_CLASS | `'Ɛ' ==~ /\\w/U`
-|`u` | UNICODE_CASE | `'Ɛ' ==~ /ɛ/iu`
-|`x` | COMMENTS (aka extended) | `'a' ==~ /a #comment/x`
-|=======================================================================
-
-
[[painless-api]]
-[float]
== Painless API

-The following types are available for use in the Painless language. Most types and methods map directly to their Java equivalents--for more information, see the corresponding https://docs.oracle.com/javase/8/docs/api/java/lang/package-summary.html[Javadoc].
+The following Java packages are available for use in the Painless language:
+* https://docs.oracle.com/javase/8/docs/api/java/lang/package-summary.html[java.lang]
+* https://docs.oracle.com/javase/8/docs/api/java/math/package-summary.html[java.math]
+* https://docs.oracle.com/javase/8/docs/api/java/text/package-summary.html[java.text]
+* https://docs.oracle.com/javase/8/docs/api/java/time/package-summary.html[java.time]
+* https://docs.oracle.com/javase/8/docs/api/java/time/chrono/package-summary.html[java.time.chrono]
+* https://docs.oracle.com/javase/8/docs/api/java/time/format/package-summary.html[java.time.format]
+* https://docs.oracle.com/javase/8/docs/api/java/time/temporal/package-summary.html[java.time.temporal]
+* https://docs.oracle.com/javase/8/docs/api/java/time/zone/package-summary.html[java.time.zone]
+* https://docs.oracle.com/javase/8/docs/api/java/util/package-summary.html[java.util]
+* https://docs.oracle.com/javase/8/docs/api/java/util/function/package-summary.html[java.util.function]
+* https://docs.oracle.com/javase/8/docs/api/java/util/regex/package-summary.html[java.util.regex]
+* https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html[java.util.stream]
-
-[float]
-=== Dynamic Types
+Note that unsafe classes and methods are not included; there is no support for:
-
-* `def` (This type can be used to represent any other type.)
-
-[float]
-=== Basic Types
-
-* `void`
-* `boolean`
-* `short`
-* `char`
-* `int`
-* `long`
-* `float`
-* `double`
-
-[float]
-=== Complex Types
-
-Non-static methods/members in superclasses are available to subclasses.
-Generic types with unspecified generic parameters are parameters of type `def`.
- ------ -ArithmeticException extends Exception - () ------ - ------ -ArrayList extends List - () ------ - ------ -ArrayList extends List - () ------ - ------ -ArrayList extends List - () ------ - ------ -Boolean extends Object - (boolean) - static Boolean valueOf(boolean) - boolean booleanValue() ------ - ------ -Character extends Object - (char) - static Character valueOf(char) - char charValue() - static char MIN_VALUE - static char MAX_VALUE ------ - ------ -CharSequence extends Object - char charAt(int) - int length() ------ - ------ -Collection extends Object - boolean add(def) - void clear() - boolean contains(def) - boolean isEmpty() - Iterator iterator() - boolean remove(def) - int size() ------ - ------ -Collection extends Object - boolean add(Object) - void clear() - boolean contains(Object) - boolean isEmpty() - Iterator iterator() - boolean remove(Object) - int size() ------ - ------ -Collection extends Object - boolean add(String) - void clear() - boolean contains(String) - boolean isEmpty() - Iterator iterator() - boolean remove(String) - int size() ------ - ------ -Double extends Number - (double) - static Double valueOf(double) - static double MIN_VALUE - static double MAX_VALUE ------ - ------ -Exception extends Object - String getMessage() ------ - ------ -Float extends Number - (float) - static Float valueOf(float) - static float MIN_VALUE - static float MAX_VALUE ------ - ------ -HashMap extends Map - () ------ - ------ -HashMap extends Map - () ------ - ------ -HashMap extends Map - () ------ - ------ -HashMap extends Map - () ------ - ------ -IllegalArgument extends Exception - () ------ - ------ -IllegalState extends Exception - () ------ - ------ -Integer extends Number - (int) - static Integer valueOf(int) - static int MIN_VALUE - static int MAX_VALUE ------ - ------ -Iterator extends Object - boolean hasNext() - def next() - void remove() ------ - ------ -Iterator extends Object - boolean hasNext() - String next() - void remove() ------ - ------ -List extends Collection - def set(int, def) - def get(int) - def remove(int) ------ - ------ -List extends Collection - Object set(int, Object) - Object get(int) - Object remove(int) ------ - ------ -List extends Collection - String set(int, String) - String get(int) - String remove(int) ------ - ------ -Long extends Number - (long) - static Long valueOf(long) - static long MIN_VALUE - static long MAX_VALUE ------ - ------ -Map extends Object - def put (def, def) - def get (def) - def remove (def) - boolean isEmpty() - int size() - boolean containsKey(def) - boolean containsValue(def) - Set keySet() - Collection values() ------ - ------ -Map extends Object - Object put (Object, Object) - Object get (Object) - Object remove (Object) - boolean isEmpty() - int size() - boolean containsKey(Object) - boolean containsValue(Object) - Set keySet() - Collection values() ------ - ------ -Map extends Object - def put (String, def) - def get (String) - def remove (String) - boolean isEmpty() - int size() - boolean containsKey(String) - boolean containsValue(def) - Set keySet() - Collection values() ------ - ------ -Map extends Object - Object put (String, Object) - Object get (String) - Object remove (String) - boolean isEmpty() - int size() - boolean containsKey(String) - boolean containsValue(Object) - Set keySet() - Collection values() ------ - ------ -Number extends Object - short shortValue() - short shortValue() - int intValue() - long longValue() - float floatValue() - double doubleValue() ------ - ------ -Object - String 
toString() - boolean equals(Object) - int hashCode() ------ - ------ -Set extends Collection ------ - ------ -Set extends Collection ------ - ------ -Set extends Collection ------ - ------ -Short extends Number - (short) - static Short valueOf(short) - static short MIN_VALUE - static short MAX_VALUE ------ - ------ -String extends CharSequence - (String) - int codePointAt(int) - int compareTo(String) - String concat(String) - boolean endsWith(String) - int indexOf(String, int) - boolean isEmpty() - String replace(CharSequence, CharSequence) - boolean startsWith(String) - String substring(int, int) - char[] toCharArray() - String trim() ------ - ------ -NumberFormatException extends Exception - () ------ - ------ -Void extends Object ------ - -[float] -==== Utility Classes - ------ -Math - static double abs(double) - static float fabs(float) - static long labs(long) - static int iabs(int) - static double acos(double) - static double asin(double) - static double atan(double) - static double atan2(double) - static double cbrt(double) - static double ceil(double) - static double cos(double) - static double cosh(double) - static double exp(double) - static double expm1(double) - static double floor(double) - static double hypt(double, double) - static double abs(double) - static double log(double) - static double log10(double) - static double log1p(double) - static double max(double, double) - static float fmax(float, float) - static long lmax(long, long) - static int imax(int, int) - static double min(double, double) - static float fmin(float, float) - static long lmin(long, long) - static int imin(int, int) - static double pow(double, double) - static double random() - static double rint(double) - static long round(double) - static double sin(double) - static double sinh(double) - static double sqrt(double) - static double tan(double) - static double tanh(double) - static double toDegrees(double) - static double toRadians(double) ------ - ------ -Utility - static boolean NumberToboolean(Number) - static char NumberTochar(Number) - static Boolean NumberToBoolean(Number) - static Short NumberToShort(Number) - static Character NumberToCharacter(Number) - static Integer NumberToInteger(Number) - static Long NumberToLong(Number) - static Float NumberToFloat(Number) - static Double NumberToDouble(Number) - static byte booleanTobyte(boolean) - static short booleanToshort(boolean) - static char booleanTochar(boolean) - static int booleanToint(boolean) - static long booleanTolong(boolean) - static float booleanTofloat(boolean) - static double booleanTodouble(boolean) - static Integer booleanToInteger(boolean) - static byte BooleanTobyte(Boolean) - static short BooleanToshort(Boolean) - static char BooleanTochar(Boolean) - static int BooleanToint(Boolean) - static long BooleanTolong(Boolean) - static float BooleanTofloat(Boolean) - static double BooleanTodouble(Boolean) - static Byte BooleanToByte(Boolean) - static Short BooleanToShort(Boolean) - static Character BooleanToCharacter(Boolean) - static Integer BooleanToInteger(Boolean) - static Long BooleanToLong(Boolean) - static Float BooleanToFloat(Boolean) - static Double BooleanToDouble(Boolean) - static boolean byteToboolean(byte) - static Short byteToShort(byte) - static Character byteToCharacter(byte) - static Integer byteToInteger(byte) - static Long byteToLong(byte) - static Float byteToFloat(byte) - static Double byteToDouble(byte) - static boolean ByteToboolean(Byte) - static char ByteTochar(Byte) - static boolean shortToboolean(short) - 
static Byte shortToByte(short) - static Character shortToCharacter(short) - static Integer shortToInteger(short) - static Long shortToLong(short) - static Float shortToFloat(short) - static Double shortToDouble(short) - static boolean ShortToboolean(Short) - static char ShortTochar(Short) - static boolean charToboolean(char) - static Byte charToByte(char) - static Short charToShort(char) - static Integer charToInteger(char) - static Long charToLong(char) - static Float charToFloat(char) - static Double charToDouble(char) - static boolean CharacterToboolean(Character) - static byte CharacterTobyte(Character) - static short CharacterToshort(Character) - static int CharacterToint(Character) - static long CharacterTolong(Character) - static float CharacterTofloat(Character) - static double CharacterTodouble(Character) - static Boolean CharacterToBoolean(Character) - static Byte CharacterToByte(Character) - static Short CharacterToShort(Character) - static Integer CharacterToInteger(Character) - static Long CharacterToLong(Character) - static Float CharacterToFloat(Character) - static Double CharacterToDouble(Character) - static boolean intToboolean(int) - static Byte intToByte(int) - static Short intToShort(int) - static Character intToCharacter(int) - static Long intToLong(int) - static Float intToFloat(int) - static Double intToDouble(int) - static boolean IntegerToboolean(Integer) - static char IntegerTochar(Integer) - static boolean longToboolean(long) - static Byte longToByte(long) - static Short longToShort(long) - static Character longToCharacter(long) - static Integer longToInteger(long) - static Float longToFloat(long) - static Double longToDouble(long) - static boolean LongToboolean(Long) - static char LongTochar(Long) - static boolean floatToboolean(float) - static Byte floatToByte(float) - static Short floatToShort(float) - static Character floatToCharacter(float) - static Integer floatToInteger(float) - static Long floatToLong(float) - static Double floatToDouble(float) - static boolean FloatToboolean(Float) - static char FloatTochar(Float) - static boolean doubleToboolean(double) - static Byte doubleToByte(double) - static Short doubleToShort(double) - static Character doubleToCharacter(double) - static Integer doubleToInteger(double) - static Long doubleToLong(double) - static Float doubleToFloat(double) - static boolean DoubleToboolean(Double) - static char DoubleTochar(Double) ------ - ------ -Def - static boolean defToboolean(def) - static byte defTobyte(def) - static short defToshort(def) - static char defTochar(def) - static int defToint(def) - static long defTolong(def) - static float defTofloat(def) - static double defTodouble(def) - static Boolean defToBoolean(def) - static Byte defToByte(def) - static Character defToCharacter(def) - static Integer defToInteger(def) - static Long defToLong(def) - static Float defToFloat(def) - static Double defToDouble(def) ------ +* Manipulation of processes and threads +* Input/Output +* Reflection diff --git a/docs/reference/query-dsl/function-score-query.asciidoc b/docs/reference/query-dsl/function-score-query.asciidoc index e7e4b8f5877..c6477b78d8b 100644 --- a/docs/reference/query-dsl/function-score-query.asciidoc +++ b/docs/reference/query-dsl/function-score-query.asciidoc @@ -83,9 +83,16 @@ First, each document is scored by the defined functions. 
The parameter
`max`:: maximum score is used
`min`:: minimum score is used

-Because scores can be on different scales (for example, between 0 and 1 for decay functions but arbitrary for `field_value_factor`) and also because sometimes a different impact of functions on the score is desirable, the score of each function can be adjusted with a user defined `weight` (). The `weight` can be defined per function in the `functions` array (example above) and is multiplied with the score computed by the respective function.
+Because scores can be on different scales (for example, between 0 and 1 for decay functions but arbitrary for `field_value_factor`) and also
+because sometimes a different impact of functions on the score is desirable, the score of each function can be adjusted with a user-defined
+`weight`. The `weight` can be defined per function in the `functions` array (example above) and is multiplied with the score computed by
+the respective function. If `weight` is given without any other function declaration, `weight` acts as a function that simply returns the `weight`.
+In case `score_mode` is set to `avg`, the individual scores will be combined by a **weighted** average.
+For example, if two functions return score 1 and 2 and their respective weights are 3 and 4, then their scores will be combined as
+`(1*3+2*4)/(3+4)` and **not** `(1*3+2*4)/2`.
+
The new score can be restricted to not exceed a certain limit by setting the
`max_boost` parameter. The default for `max_boost` is FLT_MAX.
@@ -124,7 +131,10 @@ simple sample:
[source,js]
--------------------------------------------------
"script_score" : {
-    "script" : "_score * doc['my_numeric_field'].value"
+    "script" : {
+        "lang": "painless",
+        "inline": "_score * doc['my_numeric_field'].value"
+    }
}
--------------------------------------------------
@@ -140,12 +150,12 @@ script, and provide parameters to it:
--------------------------------------------------
"script_score": {
    "script": {
-        "lang": "lang",
+        "lang": "painless",
        "params": {
            "param1": value1,
            "param2": value2
        },
-        "inline": "_score * doc['my_numeric_field'].value / pow(param1, param2)"
+        "inline": "_score * doc['my_numeric_field'].value / Math.pow(params.param1, params.param2)"
    }
}
--------------------------------------------------
diff --git a/docs/reference/query-dsl/percolate-query.asciidoc b/docs/reference/query-dsl/percolate-query.asciidoc
index 896fa7ec3ee..0d079f6072c 100644
--- a/docs/reference/query-dsl/percolate-query.asciidoc
+++ b/docs/reference/query-dsl/percolate-query.asciidoc
@@ -367,7 +367,7 @@ GET /_search
{
    "query": {
        "term" : {
-            "query.unknown_query" : ""
+            "query.extraction_result" : "failed"
        }
    }
}
diff --git a/docs/reference/query-dsl/script-query.asciidoc b/docs/reference/query-dsl/script-query.asciidoc
index ee06a1b64bd..db82375abe0 100644
--- a/docs/reference/query-dsl/script-query.asciidoc
+++ b/docs/reference/query-dsl/script-query.asciidoc
@@ -13,7 +13,10 @@ GET /_search
    "bool" : {
        "must" : {
            "script" : {
-                "script" : "doc['num1'].value > 1"
+                "script" : {
+                    "inline": "doc['num1'].value > 1",
+                    "lang": "painless"
+                }
            }
        }
    }
@@ -38,7 +41,8 @@ GET /_search
        "must" : {
            "script" : {
                "script" : {
-                    "inline" : "doc['num1'].value > param1",
+                    "inline" : "doc['num1'].value > params.param1",
+                    "lang" : "painless",
                    "params" : {
                        "param1" : 5
                    }
diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc
index 90492dee33e..ea535263b7d 100644
--- a/docs/reference/release-notes.asciidoc
+++ b/docs/reference/release-notes.asciidoc
@@
-5,12 +5,14 @@ -- This section summarizes the changes in each release. +* <> * <> * <> * <> * <> -- +include::release-notes/5.0.0-alpha4.asciidoc[] include::release-notes/5.0.0-alpha3.asciidoc[] include::release-notes/5.0.0-alpha2.asciidoc[] include::release-notes/5.0.0-alpha1.asciidoc[] diff --git a/docs/reference/release-notes/5.0.0-alpha4.asciidoc b/docs/reference/release-notes/5.0.0-alpha4.asciidoc new file mode 100644 index 00000000000..b85d4ae32a5 --- /dev/null +++ b/docs/reference/release-notes/5.0.0-alpha4.asciidoc @@ -0,0 +1,353 @@ +[[release-notes-5.0.0-alpha4]] +== 5.0.0-alpha4 Release Notes + +Also see <>. + +IMPORTANT: This is an alpha release and is intended for _testing purposes only_. Indices created in this version will *not be compatible with Elasticsearch 5.0.0 GA*. Upgrading 5.0.0-alpha4 to any other version is not supported. + +[[breaking-5.0.0-alpha4]] +[float] +=== Breaking changes + +Aggregations:: +* Remove size 0 options in aggregations {pull}18854[#18854] (issue: {issue}18838[#18838]) + +CRUD:: +* Wait for changes to be visible by search {pull}17986[#17986] (issue: {issue}1063[#1063]) + +Core:: +* Register thread pool settings {pull}18674[#18674] (issues: {issue}18613[#18613], {issue}9216[#9216]) +* Remove cluster name from data path {pull}18554[#18554] (issue: {issue}17810[#17810]) + +Highlighting:: +* Register Highlighter instances instead of classes {pull}18859[#18859] + +Inner Hits:: +* Also do not serialize `_index` key in search response for parent/child inner hits {pull}19011[#19011] +* Don't include `_id`, `_type` and `_index` keys in search response for inner hits {pull}18995[#18995] (issue: {issue}18091[#18091]) +* Nested inner hits shouldn't use relative paths {pull}18567[#18567] (issue: {issue}16653[#16653]) + +Internal:: +* Cleanup ClusterService dependencies and detached from Guice {pull}18941[#18941] +* Simplify SubFetchPhase interface {pull}18881[#18881] +* Simplify FetchSubPhase registration and detach it from Guice {pull}18862[#18862] + +Java API:: +* Remove setRefresh {pull}18752[#18752] (issue: {issue}1063[#1063]) + +Mapping:: +* Remove `_timestamp` and `_ttl` on 5.x indices. 
{pull}18980[#18980] (issue: {issue}18280[#18280]) + +Packaging:: +* Remove allow running as root {pull}18694[#18694] (issue: {issue}18688[#18688]) + +Plugins:: +* Fail to start if plugin tries broken onModule {pull}19025[#19025] +* Simplify ScriptModule and script registration {pull}18903[#18903] +* Cut over settings registration to a pull model {pull}18890[#18890] +* Plugins cleanup {pull}18594[#18594] (issue: {issue}18588[#18588]) + +Scripting:: +* Move search template to lang-mustache module {pull}18765[#18765] (issue: {issue}17906[#17906]) + +Search:: +* Remove only node preference {pull}18875[#18875] (issue: {issue}18822[#18822]) +* Add search preference to prefer multiple nodes {pull}18872[#18872] (issue: {issue}18822[#18822]) + +Settings:: +* Rename boostrap.mlockall to bootstrap.memory_lock {pull}18669[#18669] + +Snapshot/Restore:: +* Change the default of `include_global_state` from true to false for snapshot restores {pull}18773[#18773] (issue: {issue}18569[#18569]) + + + +[[feature-5.0.0-alpha4]] +[float] +=== New features + +Aggregations:: +* Adds aggregation profiling to the profile API {pull}18414[#18414] (issue: {issue}10538[#10538]) +* New Matrix Stats Aggregation module {pull}18300[#18300] (issue: {issue}16826[#16826]) + +Index APIs:: +* Add rollover API to switch index aliases given some predicates {pull}18732[#18732] (issue: {issue}18647[#18647]) + +Mapping:: +* Expose half-floats. {pull}18887[#18887] + +REST:: +* Low level Rest Client {pull}18735[#18735] (issue: {issue}7743[#7743]) + +Scroll:: +* Add the ability to partition a scroll in multiple slices. {pull}18237[#18237] (issue: {issue}13494[#13494]) + +Store:: +* Expose MMapDirectory.preLoad(). {pull}18880[#18880] +* Add primitive to shrink an index into a single shard {pull}18270[#18270] + + + +[[enhancement-5.0.0-alpha4]] +[float] +=== Enhancements + +Aggregations:: +* Automatically set the collection mode to breadth_first in the terms aggregation when the cardinality of the field is unknown or smaller than the requested size. {pull}18779[#18779] (issue: {issue}9825[#9825]) +* Rename PipelineAggregatorBuilder to PipelineAggregationBuilder. {pull}18677[#18677] (issue: {issue}18377[#18377]) +* AggregatorBuilder and PipelineAggregatorBuilder do not need generics. {pull}18368[#18368] (issue: {issue}18133[#18133]) + +Allocation:: +* Allow `_shrink` to N shards if source shards is a multiple of N {pull}18699[#18699] +* Only filter intial recovery (post API) when shrinking an index {pull}18661[#18661] +* Estimate shard size for shrinked indices {pull}18659[#18659] +* Only fail relocation target shard if failing source shard is a primary {pull}18574[#18574] (issue: {issue}16144[#16144]) +* Simplify delayed shard allocation {pull}18351[#18351] (issue: {issue}18293[#18293]) + +Analysis:: +* Add a MultiTermAwareComponent marker interface to analysis factories. 
{pull}19028[#19028] (issues: {issue}18064[#18064], {issue}9978[#9978]) +* Add Flags Parameter for Char Filter {pull}18363[#18363] (issue: {issue}18362[#18362]) + +Cache:: +* Cache FieldStats in the request cache {pull}18768[#18768] (issue: {issue}18717[#18717]) + +Cluster:: +* Index creation does not cause the cluster health to go RED {pull}18737[#18737] (issues: {issue}9106[#9106], {issue}9126[#9126]) +* Cluster Health class improvements {pull}18673[#18673] + +Core:: +* Read Elasticsearch manifest via URL {pull}18999[#18999] (issue: {issue}18996[#18996]) +* Throw if the local node is not set {pull}18963[#18963] (issue: {issue}18962[#18962]) +* Improve performance of applyDeletedShards {pull}18788[#18788] (issue: {issue}18776[#18776]) +* Bootstrap check for OnOutOfMemoryError and seccomp {pull}18756[#18756] (issue: {issue}18736[#18736]) + +Dates:: +* Improve TimeZoneRoundingTests error messages {pull}18895[#18895] +* Improve TimeUnitRounding for edge cases and DST transitions {pull}18589[#18589] + +Expressions:: +* improve date api for expressions/painless fields {pull}18658[#18658] + +Index APIs:: +* Add Shrink request source parser to parse create index request body {pull}18802[#18802] + +Index Templates:: +* Parse and validate mappings on index template creation {pull}8802[#8802] (issue: {issue}2415[#2415]) + +Ingest:: +* Add `ignore_failure` option to all ingest processors {pull}18650[#18650] (issue: {issue}18493[#18493]) +* new ScriptProcessor for Ingest {pull}18193[#18193] + +Internal:: +* Hot methods redux {pull}19016[#19016] (issue: {issue}16725[#16725]) +* Remove forked joda time BaseDateTime class {pull}18953[#18953] +* Support optional ctor args in ConstructingObjectParser {pull}18725[#18725] +* Remove thread pool from page cache recycler {pull}18664[#18664] (issue: {issue}18613[#18613]) + +Java API:: +* Switch QueryBuilders to new MatchPhraseQueryBuilder {pull}18753[#18753] + +Logging:: +* Throw IllegalStateException when handshake fails due to version or cluster mismatch {pull}18676[#18676] + +Mapping:: +* Upgrade `string` fields to `text`/`keyword` even if `include_in_all` is set. {pull}19004[#19004] (issue: {issue}18974[#18974]) + +Network:: +* Exclude admin / diagnostic requests from HTTP request limiting {pull}18833[#18833] (issues: {issue}17951[#17951], {issue}18145[#18145]) +* Do not start scheduled pings until transport start {pull}18702[#18702] + +Packaging:: +* Remove explicit parallel new GC flag {pull}18767[#18767] +* Use JAVA_HOME or java.exe in PATH like the Linux scripts do {pull}18685[#18685] (issue: {issue}4913[#4913]) + +Percolator:: +* Add percolator query extraction support for dismax query {pull}18845[#18845] +* Improve percolate query performance by not verifying certain candidate matches {pull}18696[#18696] +* Improve percolator query term extraction {pull}18610[#18610] + +Plugin Lang Painless:: +* Painless Initializers {pull}19012[#19012] +* Add augmentation {pull}19003[#19003] +* Infer lambda arguments/return type {pull}18983[#18983] +* Fix explicit casts and improve tests. 
{pull}18958[#18958] +* Add lambda captures {pull}18954[#18954] +* improve Debugger to print code even if it hits exception {pull}18932[#18932] (issue: {issue}1[#1]) +* Move semicolon hack into lexer {pull}18931[#18931] +* Add flag support to regexes {pull}18927[#18927] +* improve lambda syntax (allow single expression) {pull}18924[#18924] +* Remove useless dropArguments in megamorphic cache {pull}18913[#18913] +* non-capturing lambda support {pull}18911[#18911] (issue: {issue}18824[#18824]) +* fix bugs in operators and more improvements for the dynamic case {pull}18899[#18899] +* improve unary operators and cleanup tests {pull}18867[#18867] (issue: {issue}18849[#18849]) +* Add support for the find operator (=~) and the match operator (==~) {pull}18858[#18858] +* Remove casts and boxing for dynamic math {pull}18849[#18849] (issue: {issue}18847[#18847]) +* Refactor def math {pull}18847[#18847] +* Add support for /regex/ {pull}18842[#18842] +* Array constructor references {pull}18831[#18831] +* Method references to user functions {pull}18828[#18828] +* Add } as a delimiter. {pull}18827[#18827] (issue: {issue}18821[#18821]) +* Add Lambda Stub Node {pull}18824[#18824] +* Add capturing method references {pull}18818[#18818] (issue: {issue}18748[#18748]) +* Add Functions to Painless {pull}18810[#18810] +* Add Method to Get New MethodWriters {pull}18771[#18771] +* Static For Each {pull}18757[#18757] +* Method reference support {pull}18748[#18748] (issue: {issue}18578[#18578]) +* Add support for the new Java 9 MethodHandles#arrayLength() factory {pull}18734[#18734] +* Improve painless compile-time exceptions {pull}18711[#18711] (issue: {issue}18600[#18600]) +* add java.time packages to painless whitelist {pull}18621[#18621] +* Add Function Reference Stub to Painless {pull}18578[#18578] + +Plugins:: +* Add did-you-mean for plugin cli {pull}18942[#18942] (issue: {issue}18896[#18896]) +* Plugins: Remove name() and description() from api {pull}18906[#18906] +* Emit nicer error message when trying to install unknown plugin {pull}18876[#18876] (issue: {issue}17226[#17226]) + +Query DSL:: +* Treat zero token in `common` terms query as MatchNoDocsQuery {pull}18656[#18656] +* Handle empty query bodies at parse time and remove EmptyQueryBuilder {pull}17624[#17624] (issues: {issue}17540[#17540], {issue}17541[#17541]) + +REST:: +* Adding status field in _msearch error request bodies {pull}18586[#18586] (issue: {issue}18013[#18013]) + +Recovery:: +* index shard should be able to cancel check index on close. {pull}18839[#18839] (issue: {issue}12011[#12011]) + +Reindex API:: +* Implement ctx.op = "delete" on _update_by_query and _reindex {pull}18614[#18614] (issue: {issue}18043[#18043]) + +Scripting:: +* Compile each Groovy script in its own classloader {pull}18918[#18918] (issue: {issue}18572[#18572]) +* Include script field even if it value is null {pull}18384[#18384] (issue: {issue}16408[#16408]) + +Scroll:: +* Add an index setting to limit the maximum number of slices allowed in a scroll request. 
{pull}18782[#18782] + +Search:: +* Change default similarity to BM25 {pull}18948[#18948] (issue: {issue}18944[#18944]) +* Add a parameter to cap the number of searches the msearch api will concurrently execute {pull}18721[#18721] + +Sequence IDs:: +* Persist sequence number checkpoints {pull}18949[#18949] (issue: {issue}10708[#10708]) +* Add sequence numbers to cat shards API {pull}18772[#18772] + +Settings:: +* Improve error message if a setting is not found {pull}18920[#18920] (issue: {issue}18663[#18663]) +* Cleanup placeholder replacement {pull}17335[#17335] + +Snapshot/Restore:: +* Adds UUIDs to snapshots {pull}18228[#18228] (issue: {issue}18156[#18156]) +* Clarify the semantics of the BlobContainer interface {pull}18157[#18157] (issue: {issue}15580[#15580]) + +Stats:: +* Add total_indexing_buffer/_in_bytes to nodes info API {pull}18914[#18914] (issue: {issue}18651[#18651]) +* Allow FieldStatsRequest to disable cache {pull}18900[#18900] +* Remove index_writer_max_memory stat from segment stats {pull}18651[#18651] (issues: {issue}14121[#14121], {issue}7440[#7440]) +* Move DocStats under Engine to get more accurate numbers {pull}18587[#18587] + +Task Manager:: +* Fetch result when wait_for_completion {pull}18905[#18905] +* Create get task API that falls back to the .tasks index {pull}18682[#18682] +* Add ability to store results for long running tasks {pull}17928[#17928] + +Translog:: +* Beef up Translog testing with random channel exceptions {pull}18997[#18997] +* Do not replay into translog on local recovery {pull}18547[#18547] + + + +[[bug-5.0.0-alpha4]] +[float] +=== Bug fixes + +Allocation:: +* Fix recovery throttling to properly handle relocating non-primary shards {pull}18701[#18701] (issue: {issue}18640[#18640]) + +CAT API:: +* Fix merge stats rendering in RestIndicesAction {pull}18720[#18720] + +CRUD:: +* Squash a race condition in RefreshListeners {pull}18806[#18806] + +Circuit Breakers:: +* Never trip circuit breaker in liveness request {pull}18627[#18627] (issue: {issue}17951[#17951]) + +Cluster:: +* Fix block checks when no indices are specified {pull}19047[#19047] (issue: {issue}8105[#8105]) +* Acknowledge index deletion requests based on standard cluster state acknowledgment {pull}18602[#18602] (issues: {issue}16442[#16442], {issue}18558[#18558]) + +Core:: +* Throw exception if using a closed transport client {pull}18722[#18722] (issue: {issue}18708[#18708]) + +Dates:: +* Fix invalid rounding value for TimeIntervalRounding close to DST transitions {pull}18800[#18800] +* Fix problem with TimeIntervalRounding on DST end {pull}18780[#18780] + +Expressions:: +* replace ScriptException with a better one {pull}18600[#18600] + +Ingest:: +* Fix ignore_failure behavior in _simulate?verbose and more cleanup {pull}18987[#18987] + +Internal:: +* Fix filtering of node ids for TransportNodesAction {pull}18634[#18634] (issue: {issue}18618[#18618]) + +Mapping:: +* Better error message when mapping configures null {pull}18809[#18809] (issue: {issue}18803[#18803]) +* Process dynamic templates in order. 
{pull}18638[#18638] (issues: {issue}18625[#18625], {issue}2401[#2401]) + +Packaging:: +* Remove extra bin/ directory in bin folder {pull}18630[#18630] + +Plugin Lang Painless:: +* Fix compound assignment with string concats {pull}18933[#18933] (issue: {issue}18929[#18929]) +* Fix horrible capture {pull}18907[#18907] (issue: {issue}18899[#18899]) +* Fix Casting Bug {pull}18871[#18871] + +Query DSL:: +* Make parsing of bool queries stricter {pull}19052[#19052] (issue: {issue}19034[#19034]) + +REST:: +* Get XContent params from request in Nodes rest actions {pull}18860[#18860] (issue: {issue}18794[#18794]) + +Reindex API:: +* Fix a race condition in reindex's rethrottle {pull}18731[#18731] (issue: {issue}18744[#18744]) + +Search:: +* Require timeout units when parsing query body {pull}19077[#19077] (issue: {issue}19075[#19075]) +* Close SearchContext if query rewrite failed {pull}18727[#18727] + +Settings:: +* Register "cloud.node.auto_attributes" setting in EC2 discovery plugin {pull}18678[#18678] + +Snapshot/Restore:: +* Better handling of an empty shard's segments_N file {pull}18784[#18784] (issue: {issue}18707[#18707]) + +Stats:: +* Fix sync flush total shards statistics {pull}18766[#18766] + +Translog:: +* Fix translog replay multiple operations same doc {pull}18611[#18611] (issues: {issue}18547[#18547], {issue}18623[#18623]) + + + +[[upgrade-5.0.0-alpha4]] +[float] +=== Upgrades + +Core:: +* Upgrade to Lucene 6.1.0. {pull}18926[#18926] +* Upgrade to lucene-6.1.0-snapshot-3a57bea. {pull}18786[#18786] +* Upgrade to Lucene 6.0.1. {pull}18648[#18648] (issues: {issue}17535[#17535], {issue}28[#28]) + +Dates:: +* Upgrade joda-time to 2.9.4 {pull}18609[#18609] (issues: {issue}14524[#14524], {issue}18017[#18017]) + +Packaging:: +* Upgrade JNA to 4.2.2 and remove optionality {pull}19045[#19045] (issue: {issue}13245[#13245]) + +Plugin Discovery EC2:: +* Update aws sdk to 1.10.69 and add use_throttle_retries repository setting {pull}17784[#17784] (issues: {issue}538[#538], {issue}586[#586], {issue}589[#589]) + + + diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc index 25820d04800..150b1b93a36 100644 --- a/docs/reference/search/profile.asciidoc +++ b/docs/reference/search/profile.asciidoc @@ -4,25 +4,12 @@ experimental[] The Profile API provides detailed timing information about the execution of individual components -in a query. It gives the user insight into how queries are executed at a low level so that -the user can understand why certain queries are slow, and take steps to improve their slow queries. +in a search request. It gives the user insight into how search requests are executed at a low level so that +the user can understand why certain requests are slow, and take steps to improve them. -The output from the Profile API is *very* verbose, especially for complicated queries executed across +The output from the Profile API is *very* verbose, especially for complicated requests executed across many shards. Pretty-printing the response is recommended to help understand the output -[NOTE] -======================================= -The details provided by the Profile API directly expose Lucene class names and concepts, which means -that complete interpretation of the results require fairly advanced knowledge of Lucene. This -page attempts to give a crash-course in how Lucene executes queries so that you can use the Profile API to successfully -diagnose and debug queries, but it is only an overview. 
For complete understanding, please refer
-to Lucene's documentation and, in places, the code.
-
-With that said, a complete understanding is often not required to fix a slow query. It is usually
-sufficient to see that a particular component of a query is slow, and not necessarily understand why
-the `advance` phase of that query is the cause, for example.
-=======================================
-
[float]
=== Usage

@@ -35,7 +22,7 @@ curl -XGET 'localhost:9200/_search' -d '{
   "query" : {
      "match" : { "message" : "search test" }
   }
-}
+}'
--------------------------------------------------
<1> Setting the top-level `profile` parameter to `true` will enable profiling for the search
@@ -141,7 +128,8 @@ First, the overall structure of the profile response is as follows:
            "rewrite_time": 185002,           <3>
            "collector": [...]                <4>
         }
-      ]
+      ],
+      "aggregations": [...]                  <5>
      }
   ]
}
@@ -152,6 +140,7 @@ by a unique ID
<2> Each profile contains a section which holds details about the query execution
<3> Each profile has a single time representing the cumulative rewrite time
<4> Each profile also contains a section about the Lucene Collectors which run the search
+<5> Each profile contains a section which holds the details about the aggregation execution

Because a search request may be executed against one or more shards in an index, and a search may cover
one or more indices, the top level element in the profile response is an array of `shard` objects.
@@ -164,12 +153,26 @@ But occasionally multiple searches will be executed, such as including a global
a secondary "match_all" query for the global context).

Inside each `search` object there will be two arrays of profiled information:
-a `query` array and a `collector` array. In the future, more sections may be added, such as `suggest`, `highlight`,
-`aggregations`, etc
+a `query` array and a `collector` array. Alongside the `search` object is an `aggregations` object that contains the profile information for the aggregations. In the future, more sections may be added, such as `suggest`, `highlight`, etc.

There will also be a `rewrite` metric showing the total time spent rewriting the query (in nanoseconds).

-=== `query` Section
+=== Profiling Queries
+
+[NOTE]
+=======================================
+The details provided by the Profile API directly expose Lucene class names and concepts, which means
+that complete interpretation of the results requires fairly advanced knowledge of Lucene. This
+page attempts to give a crash-course in how Lucene executes queries so that you can use the Profile API to successfully
+diagnose and debug queries, but it is only an overview. For complete understanding, please refer
+to Lucene's documentation and, in places, the code.
+
+With that said, a complete understanding is often not required to fix a slow query. It is usually
+sufficient to see that a particular component of a query is slow, and not necessarily understand why
+the `advance` phase of that query is the cause, for example.
+=======================================
+
+==== `query` Section

The `query` section contains detailed timing of the query tree executed by Lucene on a particular shard.
The overall structure of this query tree will resemble your original Elasticsearch query, but may be slightly
@@ -217,9 +220,9 @@ that in a moment. Finally, the `"children"` array lists any sub-queries that ma
values ("search test"), our BooleanQuery holds two children TermQueries. They have identical information (type, time,
breakdown, etc).
Children are allowed to have their own children. -==== Timing Breakdown +===== Timing Breakdown -The `breakdown` component lists detailed timing statistics about low-level Lucene execution: +The `"breakdown"` component lists detailed timing statistics about low-level Lucene execution: [source,js] -------------------------------------------------- @@ -235,14 +238,14 @@ The `breakdown` component lists detailed timing statistics about low-level Lucen -------------------------------------------------- Timings are listed in wall-clock nanoseconds and are not normalized at all. All caveats about the overall -`time` apply here. The intention of the breakdown is to give you a feel for A) what machinery in Lucene is +`"time"` apply here. The intention of the breakdown is to give you a feel for A) what machinery in Lucene is actually eating time, and B) the magnitude of differences in times between the various components. Like the overall time, the breakdown is inclusive of all children times. The meaning of the stats are as follows: [float] -=== All parameters: +==== All parameters: [horizontal] `create_weight`:: @@ -303,7 +306,7 @@ The meaning of the stats are as follows: This records the time taken to score a particular document via it's Scorer -=== `collectors` Section +==== `collectors` Section The Collectors portion of the response shows high-level execution details. Lucene works by defining a "Collector" which is responsible for coordinating the traversal, scoring and collection of matching documents. Collectors @@ -379,7 +382,7 @@ For reference, the various collector reason's are: -=== `rewrite` Section +==== `rewrite` Section All queries in Lucene undergo a "rewriting" process. A query (and its sub-queries) may be rewritten one or more times, and the process continues until the query stops changing. This process allows Lucene to perform @@ -391,7 +394,7 @@ The rewriting process is complex and difficult to display, since queries can cha showing the intermediate results, the total rewrite time is simply displayed as a value (in nanoseconds). This value is cumulative and contains the total time for all queries being rewritten. -=== A more complex example +==== A more complex example To demonstrate a slightly more complex query and the associated results, we can profile the following query: @@ -563,28 +566,7 @@ The Collector tree is fairly straightforward, showing how a single MultiCollecto to execute the post_filter (and in turn wraps the normal scoring SimpleCollector), a BucketCollector to run all scoped aggregations. In the MatchAll search, there is a single GlobalAggregator to run the global aggregation. -=== Performance Notes - -Like any profiler, the Profile API introduce a non-negligible overhead to query execution. The act of instrumenting -low-level method calls such as `advance` and `next_doc` can be fairly expensive, since these methods are called -in tight loops. Therefore, profiling should not be enabled in production settings by default, and should not -be compared against non-profiled query times. Profiling is just a diagnostic tool. - -There are also cases where special Lucene optimizations are disabled, since they are not amenable to profiling. This -could cause some queries to report larger relative times than their non-profiled counterparts, but in general should -not have a drastic effect compared to other components in the profiled query. 
-
-=== Limitations
-
-- Profiling statistics are currently not available for suggestions, highlighting, `dfs_query_then_fetch`
-- Detailed breakdown for aggregations is not currently available past the high-level overview provided
-from the Collectors
-- The Profiler is still highly experimental. The Profiler is instrumenting parts of Lucene that were
-never designed to be exposed in this manner, and so all results should be viewed as a best effort to provide detailed
-diagnostics. We hope to improve this over time. If you find obviously wrong numbers, strange query structures or
-other bugs, please report them!
-
-=== Understanding MultiTermQuery output
+==== Understanding MultiTermQuery output

A special note needs to be made about the `MultiTermQuery` class of queries. This includes wildcards, regex and fuzzy
queries. These queries emit very verbose responses, and are not overly structured.
@@ -602,3 +584,139 @@ just not the physical layout in the response, so it is sufficient to just analyz
ignore it's children if you find the details too tricky to interpret. Hopefully this will be fixed in future
iterations, but it is a tricky problem to solve and still in-progress :)
+
+=== Profiling Aggregations
+
+==== `aggregations` Section
+
+The `aggregations` section contains detailed timing of the aggregation tree executed by a particular shard.
+The overall structure of this aggregation tree will resemble your original Elasticsearch request. Let's consider
+the following example aggregations request:
+
+[source,js]
+--------------------------------------------------
+curl -XGET "http://localhost:9200/house-prices/_search" -d'
+{
+  "profile": true,
+  "size": 0,
+  "aggs": {
+    "property_type": {
+      "terms": {
+        "field": "propertyType"
+      },
+      "aggs": {
+        "avg_price": {
+          "avg": {
+            "field": "price"
+          }
+        }
+      }
+    }
+  }
+}'
+--------------------------------------------------
+
+Which yields the following aggregation profile output:
+
+[source,js]
+--------------------------------------------------
+"aggregations": [
+  {
+    "type": "org.elasticsearch.search.aggregations.bucket.terms.GlobalOrdinalsStringTermsAggregator",
+    "description": "property_type",
+    "time": "4280.456978ms",
+    "breakdown": {
+      "reduce": 0,
+      "build_aggregation": 49765,
+      "initialise": 52785,
+      "collect": 3155490036
+    },
+    "children": [
+      {
+        "type": "org.elasticsearch.search.aggregations.metrics.avg.AvgAggregator",
+        "description": "avg_price",
+        "time": "1124.864392ms",
+        "breakdown": {
+          "reduce": 0,
+          "build_aggregation": 1394,
+          "initialise": 2883,
+          "collect": 1124860115
+        }
+      }
+    ]
+  }
+]
+--------------------------------------------------
+
+From the profile structure we can see our `property_type` terms aggregation, which is internally represented by the
+`GlobalOrdinalsStringTermsAggregator` class, and the sub-aggregator `avg_price`, which is internally represented by the `AvgAggregator` class. The `type` field displays the class used internally to represent the aggregation. The `description` field displays the name of the aggregation.
+
+The `"time"` field shows that it took ~4 seconds for the entire aggregation to execute. The recorded time is inclusive
+of all children.
+
+The `"breakdown"` field will give detailed stats about how the time was spent; we'll look at
+that in a moment. Finally, the `"children"` array lists any sub-aggregations that may be present. Because we have an `avg_price` aggregation as a sub-aggregation to the `property_type` aggregation, we see it listed as a child of the `property_type` aggregation.
The two aggregation outputs have identical information (type, time,
+breakdown, etc). Children are allowed to have their own children.
+
+===== Timing Breakdown
+
+The `"breakdown"` component lists detailed timing statistics about low-level Lucene execution:
+
+[source,js]
+--------------------------------------------------
+"breakdown": {
+  "reduce": 0,
+  "build_aggregation": 49765,
+  "initialise": 52785,
+  "collect": 3155490036
+}
+--------------------------------------------------
+
+Timings are listed in wall-clock nanoseconds and are not normalized at all. All caveats about the overall
+`time` apply here. The intention of the breakdown is to give you a feel for A) what machinery in Elasticsearch is
+actually eating time, and B) the magnitude of differences in times between the various components. Like the overall time,
+the breakdown is inclusive of all children times. For example, the `collect` value of `3155490036` above accounts for
+roughly 3155ms of the ~4280ms total.
+
+The meanings of the stats are as follows:
+
+[float]
+==== All parameters:
+
+[horizontal]
+`initialise`::
+
+    This times how long it takes to create and initialise the aggregation before starting to collect documents.
+
+`collect`::
+
+    This represents the cumulative time spent in the collect phase of the aggregation. This is where matching documents are passed to the aggregation and the state of the aggregator is updated based on the information contained in the documents.
+
+`build_aggregation`::
+
+    This represents the time spent creating the shard-level results of the aggregation, ready to be passed back to the reducing node once the collection of documents is finished.
+
+`reduce`::
+
+    This is not currently used and will always report `0`. Currently aggregation profiling only times the shard-level parts of the aggregation execution. Timing of the reduce phase will be added later.
+
+=== Profiling Considerations
+
+==== Performance Notes
+
+Like any profiler, the Profile API introduces a non-negligible overhead to search execution. The act of instrumenting
+low-level method calls such as `collect`, `advance` and `next_doc` can be fairly expensive, since these methods are called
+in tight loops. Therefore, profiling should not be enabled in production settings by default, and should not
+be compared against non-profiled query times. Profiling is just a diagnostic tool.
+
+There are also cases where special Lucene optimizations are disabled, since they are not amenable to profiling. This
+could cause some queries to report larger relative times than their non-profiled counterparts, but in general should
+not have a drastic effect compared to other components in the profiled query.
+
+==== Limitations
+
+- Profiling statistics are currently not available for suggestions, highlighting, or `dfs_query_then_fetch`
+- Profiling of the reduce phase of aggregation is currently not available
+- The Profiler is still highly experimental. The Profiler is instrumenting parts of Lucene that were
+never designed to be exposed in this manner, and so all results should be viewed as a best effort to provide detailed
+diagnostics. We hope to improve this over time. If you find obviously wrong numbers, strange query structures or
+other bugs, please report them!
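+
+Profiling works the same way when a single request mixes queries and
+aggregations; the per-shard profile then contains the `query` and `collector`
+arrays alongside the `aggregations` array. A minimal sketch, reusing the
+`house-prices` index and fields from the earlier example (the match value is
+illustrative):
+
+[source,js]
+--------------------------------------------------
+curl -XGET "http://localhost:9200/house-prices/_search" -d'
+{
+  "profile": true,
+  "query": {
+    "match": { "propertyType": "detached" }
+  },
+  "aggs": {
+    "avg_price": {
+      "avg": { "field": "price" }
+    }
+  }
+}'
+--------------------------------------------------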
diff --git a/docs/reference/search/request/rescore.asciidoc b/docs/reference/search/request/rescore.asciidoc index 8bab1cf1f5c..488884ef8da 100644 --- a/docs/reference/search/request/rescore.asciidoc +++ b/docs/reference/search/request/rescore.asciidoc @@ -118,7 +118,10 @@ curl -s -XPOST 'localhost:9200/_search' -d '{ "rescore_query" : { "function_score" : { "script_score": { - "script": "log10(doc['numeric'].value + 2)" + "script": { + "lang": "painless", + "inline": "Math.log10(doc['numeric'].value + 2)" + } } } } diff --git a/docs/reference/search/request/script-fields.asciidoc b/docs/reference/search/request/script-fields.asciidoc index 6e054f02e1c..b544c79e4f2 100644 --- a/docs/reference/search/request/script-fields.asciidoc +++ b/docs/reference/search/request/script-fields.asciidoc @@ -13,10 +13,14 @@ GET /_search }, "script_fields" : { "test1" : { - "script" : "doc['my_field_name'].value * 2" + "script" : { + "lang": "painless", + "inline": "doc['my_field_name'].value * 2" + } }, "test2" : { "script" : { + "lang": "painless", "inline": "doc['my_field_name'].value * factor", "params" : { "factor" : 2.0 diff --git a/docs/reference/search/request/sort.asciidoc b/docs/reference/search/request/sort.asciidoc index fd3dbffc1d6..d0a50f0ebfd 100644 --- a/docs/reference/search/request/sort.asciidoc +++ b/docs/reference/search/request/sort.asciidoc @@ -378,7 +378,8 @@ GET /_search "_script" : { "type" : "number", "script" : { - "inline": "doc['field_name'].value * factor", + "lang": "painless", + "inline": "doc['field_name'].value * params.factor", "params" : { "factor" : 1.1 } diff --git a/docs/reference/search/search-template.asciidoc b/docs/reference/search/search-template.asciidoc index 8533984428b..359b692f528 100644 --- a/docs/reference/search/search-template.asciidoc +++ b/docs/reference/search/search-template.asciidoc @@ -89,6 +89,89 @@ which is rendered as: } ------------------------------------------ + +[float] +===== Concatenating array of values + +The `{{#join}}array{{/join}}` function can be used to concatenate the +values of an array as a comma delimited string: + +[source,js] +------------------------------------------ +GET /_search/template +{ + "inline": { + "query": { + "match": { + "emails": "{{#join}}emails{{/join}}" + } + } + }, + "params": { + "emails": [ "username@email.com", "lastname@email.com" ] + } +} +------------------------------------------ + +which is rendered as: + +[source,js] +------------------------------------------ +{ + "query" : { + "match" : { + "emails" : "username@email.com,lastname@email.com" + } + } +} +------------------------------------------ + +The function also accepts a custom delimiter: + +[source,js] +------------------------------------------ +GET /_search/template +{ + "inline": { + "query": { + "range": { + "born": { + "gte" : "{{date.min}}", + "lte" : "{{date.max}}", + "format": "{{#join delimiter='||'}}date.formats{{/join delimiter='||'}}" + } + } + } + }, + "params": { + "date": { + "min": "2016", + "max": "31/12/2017", + "formats": ["dd/MM/yyyy", "yyyy"] + } + } +} +------------------------------------------ + +which is rendered as: + +[source,js] +------------------------------------------ +{ + "query" : { + "range" : { + "born" : { + "gte" : "2016", + "lte" : "31/12/2017", + "format" : "dd/MM/yyyy||yyyy" + } + } + } +} + +------------------------------------------ + + [float] ===== Default values @@ -140,6 +223,46 @@ for `end`: } ------------------------------------------ +[float] +===== Converting parameters to JSON + +The 
`{{#toJson}}parameter{{/toJson}}` function can be used to convert parameters
+like maps and arrays to their JSON representation:
+
+[source,js]
+------------------------------------------
+{
+  "inline": "{\"query\":{\"bool\":{\"must\": {{#toJson}}clauses{{/toJson}} }}}",
+  "params": {
+    "clauses": [
+      { "term": "foo" },
+      { "term": "bar" }
+    ]
+  }
+}
+------------------------------------------
+
+which is rendered as:
+
+[source,js]
+------------------------------------------
+{
+  "query" : {
+    "bool" : {
+      "must" : [
+        {
+          "term" : "foo"
+        },
+        {
+          "term" : "bar"
+        }
+      ]
+    }
+  }
+}
+------------------------------------------
+
+
[float]
===== Conditional clauses
diff --git a/docs/reference/setup/configuration.asciidoc b/docs/reference/setup/configuration.asciidoc
index 52bf5ffcbff..68f73fc96b8 100644
--- a/docs/reference/setup/configuration.asciidoc
+++ b/docs/reference/setup/configuration.asciidoc
@@ -26,7 +26,7 @@ setting, as follows:
[source,sh]
-------------------------------
-./bin/elasticsearch -Ees.path.conf=/path/to/my/config/
+./bin/elasticsearch -Epath.conf=/path/to/my/config/
-------------------------------

[float]
diff --git a/docs/resiliency/index.asciidoc b/docs/resiliency/index.asciidoc
index 24035e0772d..802c380b4a7 100644
--- a/docs/resiliency/index.asciidoc
+++ b/docs/resiliency/index.asciidoc
@@ -55,6 +55,14 @@ If you encounter an issue, https://github.com/elastic/elasticsearch/issues[pleas
We are committed to tracking down and fixing all the issues that are posted.

+[float]
+==== Jepsen Tests
+
+The Jepsen platform is specifically designed to test distributed systems. It is not a single test and is regularly adapted
+to create new scenarios. We have ported all published scenarios to our testing infrastructure. Of course,
+as the system evolves, new scenarios can come up that are not yet covered. We are committed to investigating all new scenarios and will
+report issues that we find on this page and in our GitHub repository.
+
[float]
=== Better request retry mechanism when nodes are disconnected (STATUS: ONGOING)

@@ -90,7 +98,7 @@ space. The following issues have been identified:
* Set a hard limit on `from`/`size` parameters {GIT}9311[#9311]. (STATUS: DONE, v2.1.0)
* Prevent combinatorial explosion in aggregations from causing OOM {GIT}8081[#8081]. (STATUS: ONGOING)
* Add the byte size of each hit to the request circuit breaker {GIT}9310[#9310]. (STATUS: ONGOING)
-* Limit the size of individual requests and also add a circuit breaker for the total memory used by in-flight request objects {GIT}16011[#16011]. (STATUS: ONGOING)
+* Limit the size of individual requests and also add a circuit breaker for the total memory used by in-flight request objects {GIT}16011[#16011]. (STATUS: DONE, v5.0.0)

Other safeguards are tracked in the meta-issue {GIT}11511[#11511].

@@ -102,17 +110,31 @@ Indices stats and indices segments requests reach out to all nodes that have sha
while the stats request arrives will make that part of the request fail and are just ignored in the overall stats result. {GIT}13719[#13719]

[float]
-=== Jepsen Test Failures (STATUS: ONGOING)
+=== Documentation of guarantees and handling of failures (STATUS: ONGOING)

-We have increased our test coverage to include scenarios tested by Jepsen. We make heavy use of randomization to expand on the scenarios that can be tested and to introduce new error conditions.
You can follow the work on the master branch of the https://github.com/elastic/elasticsearch/blob/master/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java[`DiscoveryWithServiceDisruptionsIT` class], where we will add more tests as time progresses.
+This status page is a start, but we can do a better job of explicitly documenting the processes at work in Elasticsearch and what happens
+in the case of each type of failure. The plan is to have a test case that validates each behavior under simulated conditions. Every test
+will document the expected results, the associated test code, and an explicit PASS or FAIL status for each simulated case.

[float]
-=== Document guarantees and handling of failure (STATUS: ONGOING)
+=== Run Jepsen (STATUS: ONGOING)
+
+We have ported all of the known scenarios in the Jepsen blogs to our testing infrastructure. The new tests are run continuously in our
+testing farm and are passing. We are also working on running Jepsen independently to verify that no failures are found.

-This status page is a start, but we can do a better job of explicitly documenting the processes at work in Elasticsearch, and what happens in the case of each type of failure. The plan is to have a test case that validates each behavior under simulated conditions. Every test will document the expected results, the associated test code and an explicit PASS or FAIL status for each simulated case.

== Unreleased

+[float]
+=== Port Jepsen tests to our testing framework (STATUS: UNRELEASED, v5.0.0)
+
+We have increased our test coverage to include scenarios tested by Jepsen, as described in the Elasticsearch-related blogs. We make heavy
+use of randomization to expand on the scenarios that can be tested and to introduce new error conditions.
+You can follow the work on the master branch of the
+https://github.com/elastic/elasticsearch/blob/master/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java[`DiscoveryWithServiceDisruptionsIT` class],
+where the `testAckedIndexing` test was specifically added to cover known Jepsen-related scenarios.
+ + [float] === Loss of documents during network partition (STATUS: UNRELEASED, v5.0.0) diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStats.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStats.java index edef75389c8..20be7e72888 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStats.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStats.java @@ -36,8 +36,8 @@ import java.util.Map; */ public class InternalMatrixStats extends InternalMetricsAggregation implements MatrixStats { - public final static Type TYPE = new Type("matrix_stats"); - public final static AggregationStreams.Stream STREAM = new AggregationStreams.Stream() { + public static final Type TYPE = new Type("matrix_stats"); + public static final AggregationStreams.Stream STREAM = new AggregationStreams.Stream() { @Override public InternalMatrixStats readResult(StreamInput in) throws IOException { InternalMatrixStats result = new InternalMatrixStats(); diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsResults.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsResults.java index 96b7b74ab4e..1ae29e65761 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsResults.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsResults.java @@ -34,9 +34,9 @@ import java.util.Map; */ class MatrixStatsResults implements Writeable { /** object holding results - computes results in place */ - final protected RunningStats results; + protected final RunningStats results; /** pearson product correlation coefficients */ - final protected Map> correlation; + protected final Map> correlation; /** Base ctor */ public MatrixStatsResults() { diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregationBuilder.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregationBuilder.java index 51e5ce1cf27..e3aa171fe3d 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregationBuilder.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregationBuilder.java @@ -52,7 +52,7 @@ public abstract class MultiValuesSourceAggregationBuilder> + public abstract static class LeafOnly> extends MultiValuesSourceAggregationBuilder { protected LeafOnly(String name, Type type, ValuesSourceType valuesSourceType, ValueType targetValueType) { diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParser.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParser.java index dd2b69696f3..0de5e13c058 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParser.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParser.java @@ -165,8 +165,8 @@ public abstract class MultiValuesSourceParser 
implement return factory; } - private final void parseMissingAndAdd(final String aggregationName, final String currentFieldName, - XContentParser parser, final Map missing) throws IOException { + private void parseMissingAndAdd(final String aggregationName, final String currentFieldName, + XContentParser parser, final Map missing) throws IOException { XContentParser.Token token = parser.currentToken(); if (token == null) { token = parser.nextToken(); diff --git a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/BaseMatrixStatsTestCase.java b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/BaseMatrixStatsTestCase.java index b1296bb1146..2e4fa4313bd 100644 --- a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/BaseMatrixStatsTestCase.java +++ b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/BaseMatrixStatsTestCase.java @@ -34,8 +34,8 @@ public abstract class BaseMatrixStatsTestCase extends ESTestCase { protected final ArrayList fieldA = new ArrayList<>(numObs); protected final ArrayList fieldB = new ArrayList<>(numObs); protected final MultiPassStats actualStats = new MultiPassStats(); - protected final static String fieldAKey = "fieldA"; - protected final static String fieldBKey = "fieldB"; + protected static final String fieldAKey = "fieldA"; + protected static final String fieldBKey = "fieldB"; @Before public void setup() { diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AbstractStringProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AbstractStringProcessor.java index d9ffd34cac1..a0fcdb1e0d1 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AbstractStringProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AbstractStringProcessor.java @@ -20,9 +20,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import java.util.Map; @@ -53,7 +53,7 @@ abstract class AbstractStringProcessor extends AbstractProcessor { protected abstract String process(String value); - static abstract class Factory extends AbstractProcessorFactory { + abstract static class Factory implements Processor.Factory { protected final String processorType; protected Factory(String processorType) { @@ -61,11 +61,11 @@ abstract class AbstractStringProcessor extends AbstractProcessor { } @Override - public T doCreate(String processorTag, Map config) throws Exception { + public AbstractStringProcessor create(String processorTag, Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(processorType, processorTag, config, "field"); return newProcessor(processorTag, field); } - protected abstract T newProcessor(String processorTag, String field); + protected abstract AbstractStringProcessor newProcessor(String processorTag, String field); } } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AppendProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AppendProcessor.java index af163c3c187..26b0e66a63c 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AppendProcessor.java 
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AppendProcessor.java @@ -20,9 +20,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.TemplateService; import org.elasticsearch.ingest.ValueSource; @@ -64,7 +65,7 @@ public final class AppendProcessor extends AbstractProcessor { return TYPE; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { private final TemplateService templateService; @@ -73,7 +74,7 @@ public final class AppendProcessor extends AbstractProcessor { } @Override - public AppendProcessor doCreate(String processorTag, Map config) throws Exception { + public AppendProcessor create(String processorTag, Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); Object value = ConfigurationUtils.readObject(TYPE, processorTag, config, "value"); return new AppendProcessor(processorTag, templateService.compile(field), ValueSource.wrap(value, templateService)); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ConvertProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ConvertProcessor.java index 015c56c72c3..9b087fc208f 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ConvertProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ConvertProcessor.java @@ -20,9 +20,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import java.util.ArrayList; import java.util.List; @@ -93,7 +93,7 @@ public final class ConvertProcessor extends AbstractProcessor { }; @Override - public final String toString() { + public String toString() { return name().toLowerCase(Locale.ROOT); } @@ -160,9 +160,9 @@ public final class ConvertProcessor extends AbstractProcessor { return TYPE; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { @Override - public ConvertProcessor doCreate(String processorTag, Map config) throws Exception { + public ConvertProcessor create(String processorTag, Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); String typeProperty = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "type"); String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field", field); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java index a94d4d048a8..c750c84c576 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java @@
-21,9 +21,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormat; @@ -120,10 +120,10 @@ public final class DateIndexNameProcessor extends AbstractProcessor { return dateFormats; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { @Override - protected DateIndexNameProcessor doCreate(String tag, Map config) throws Exception { + public DateIndexNameProcessor create(String tag, Map config) throws Exception { String localeString = ConfigurationUtils.readOptionalStringProperty(TYPE, tag, config, "locale"); String timezoneString = ConfigurationUtils.readOptionalStringProperty(TYPE, tag, config, "timezone"); DateTimeZone timezone = timezoneString == null ? DateTimeZone.UTC : DateTimeZone.forID(timezoneString); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java index b82b9c8b76c..e61ed513114 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java @@ -21,9 +21,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.ISODateTimeFormat; @@ -108,10 +108,10 @@ public final class DateProcessor extends AbstractProcessor { return formats; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { @SuppressWarnings("unchecked") - public DateProcessor doCreate(String processorTag, Map config) throws Exception { + public DateProcessor create(String processorTag, Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field", DEFAULT_TARGET_FIELD); String timezoneString = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "timezone"); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/FailProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/FailProcessor.java index 6c434d85d5a..a24322f556f 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/FailProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/FailProcessor.java @@ -20,9 +20,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import 
org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.TemplateService; import java.util.Map; @@ -56,7 +56,7 @@ public final class FailProcessor extends AbstractProcessor { return TYPE; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { private final TemplateService templateService; @@ -65,7 +65,7 @@ public final class FailProcessor extends AbstractProcessor { } @Override - public FailProcessor doCreate(String processorTag, Map config) throws Exception { + public FailProcessor create(String processorTag, Map config) throws Exception { String message = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "message"); return new FailProcessor(processorTag, templateService.compile(message)); } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java index b6d14d1b8c5..05be47633ba 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; @@ -83,7 +82,7 @@ public final class ForEachProcessor extends AbstractProcessor { return processors; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { private final ProcessorsRegistry processorRegistry; @@ -92,7 +91,7 @@ public final class ForEachProcessor extends AbstractProcessor { } @Override - protected ForEachProcessor doCreate(String tag, Map config) throws Exception { + public ForEachProcessor create(String tag, Map config) throws Exception { String field = readStringProperty(TYPE, tag, config, "field"); List>> processorConfigs = readList(TYPE, tag, config, "processors"); List processors = ConfigurationUtils.readProcessorConfigs(processorConfigs, processorRegistry); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessor.java index aa9fbb905cc..39d17ec09bf 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessor.java @@ -20,9 +20,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import java.util.HashMap; import java.util.List; @@ -114,7 +114,7 @@ public final class GrokProcessor extends AbstractProcessor { return combinedPattern; } - public final static class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { private final Map builtinPatterns; @@ -123,7 +123,7 @@ public final class GrokProcessor extends AbstractProcessor { } @Override - public GrokProcessor doCreate(String processorTag, Map config) throws Exception { 
+ public GrokProcessor create(String processorTag, Map config) throws Exception { String matchField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); List matchPatterns = ConfigurationUtils.readList(TYPE, processorTag, config, "patterns"); boolean traceMatch = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "trace_match", false); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GsubProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GsubProcessor.java index 72bc9e76710..1dfc566670f 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GsubProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GsubProcessor.java @@ -20,8 +20,8 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import java.util.Map; import java.util.regex.Matcher; @@ -78,9 +78,9 @@ public final class GsubProcessor extends AbstractProcessor { return TYPE; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { @Override - public GsubProcessor doCreate(String processorTag, Map config) throws Exception { + public GsubProcessor create(String processorTag, Map config) throws Exception { String field = readStringProperty(TYPE, processorTag, config, "field"); String pattern = readStringProperty(TYPE, processorTag, config, "pattern"); String replacement = readStringProperty(TYPE, processorTag, config, "replacement"); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java index 821e47b85bc..60bfdd37a9f 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java @@ -60,7 +60,7 @@ public class IngestCommonPlugin extends Plugin { nodeModule.registerProcessor(SortProcessor.TYPE, (registry) -> new SortProcessor.Factory()); nodeModule.registerProcessor(GrokProcessor.TYPE, (registry) -> new GrokProcessor.Factory(builtinPatterns)); nodeModule.registerProcessor(ScriptProcessor.TYPE, (registry) -> - new ScriptProcessor.Factory(registry.getScriptService(), registry.getClusterService())); + new ScriptProcessor.Factory(registry.getScriptService())); } // Code for loading built-in grok patterns packaged with the jar file: diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JoinProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JoinProcessor.java index 8114d20f28f..da0011d20c8 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JoinProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JoinProcessor.java @@ -20,9 +20,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import java.util.List; import java.util.Map; @@ -70,9 +70,9 @@ public final class 
JoinProcessor extends AbstractProcessor { return TYPE; } - public final static class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { @Override - public JoinProcessor doCreate(String processorTag, Map config) throws Exception { + public JoinProcessor create(String processorTag, Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); String separator = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "separator"); return new JoinProcessor(processorTag, field, separator); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/LowercaseProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/LowercaseProcessor.java index 9f8ea7a5614..a0ae8e13158 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/LowercaseProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/LowercaseProcessor.java @@ -44,7 +44,7 @@ public final class LowercaseProcessor extends AbstractStringProcessor { return TYPE; } - public final static class Factory extends AbstractStringProcessor.Factory { + public static final class Factory extends AbstractStringProcessor.Factory { public Factory() { super(TYPE); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RemoveProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RemoveProcessor.java index 98c4e18a408..a7de33a7e2c 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RemoveProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RemoveProcessor.java @@ -20,9 +20,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.TemplateService; import java.util.Map; @@ -55,7 +55,7 @@ public final class RemoveProcessor extends AbstractProcessor { return TYPE; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { private final TemplateService templateService; @@ -64,7 +64,7 @@ public final class RemoveProcessor extends AbstractProcessor { } @Override - public RemoveProcessor doCreate(String processorTag, Map config) throws Exception { + public RemoveProcessor create(String processorTag, Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); return new RemoveProcessor(processorTag, templateService.compile(field)); } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RenameProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RenameProcessor.java index 9143321c4aa..ae81291b644 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RenameProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RenameProcessor.java @@ -20,9 +20,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; 
+import org.elasticsearch.ingest.Processor; import java.util.Map; @@ -75,9 +75,9 @@ public final class RenameProcessor extends AbstractProcessor { return TYPE; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { @Override - public RenameProcessor doCreate(String processorTag, Map config) throws Exception { + public RenameProcessor create(String processorTag, Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field"); return new RenameProcessor(processorTag, field, targetField); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java index 6ce85f8a6bf..4733c1f5866 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java @@ -19,20 +19,19 @@ package org.elasticsearch.ingest.common; -import org.elasticsearch.cluster.service.ClusterService; +import java.util.HashMap; +import java.util.Map; + import org.elasticsearch.common.Strings; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; -import java.util.HashMap; -import java.util.Map; - import static java.util.Collections.emptyMap; import static org.elasticsearch.common.Strings.hasLength; import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException; @@ -52,14 +51,12 @@ public final class ScriptProcessor extends AbstractProcessor { private final Script script; private final ScriptService scriptService; - private final ClusterService clusterService; private final String field; - ScriptProcessor(String tag, Script script, ScriptService scriptService, ClusterService clusterService, String field) { + ScriptProcessor(String tag, Script script, ScriptService scriptService, String field) { super(tag); this.script = script; this.scriptService = scriptService; - this.clusterService = clusterService; this.field = field; } @@ -67,7 +64,7 @@ public final class ScriptProcessor extends AbstractProcessor { public void execute(IngestDocument document) { Map vars = new HashMap<>(); vars.put("ctx", document.getSourceAndMetadata()); - CompiledScript compiledScript = scriptService.compile(script, ScriptContext.Standard.INGEST, emptyMap(), clusterService.state()); + CompiledScript compiledScript = scriptService.compile(script, ScriptContext.Standard.INGEST, emptyMap()); ExecutableScript executableScript = scriptService.executable(compiledScript, vars); Object value = executableScript.run(); if (field != null) { @@ -80,18 +77,16 @@ public final class ScriptProcessor extends AbstractProcessor { return TYPE; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { private final ScriptService scriptService; - private final ClusterService clusterService; - public 
Factory(ScriptService scriptService, ClusterService clusterService) { + public Factory(ScriptService scriptService) { this.scriptService = scriptService; - this.clusterService = clusterService; } @Override - public ScriptProcessor doCreate(String processorTag, Map config) throws Exception { + public ScriptProcessor create(String processorTag, Map config) throws Exception { String field = readOptionalStringProperty(TYPE, processorTag, config, "field"); String lang = readStringProperty(TYPE, processorTag, config, "lang"); String inline = readOptionalStringProperty(TYPE, processorTag, config, "inline"); @@ -120,7 +115,7 @@ public final class ScriptProcessor extends AbstractProcessor { throw newConfigurationException(TYPE, processorTag, null, "Could not initialize script"); } - return new ScriptProcessor(processorTag, script, scriptService, clusterService, field); + return new ScriptProcessor(processorTag, script, scriptService, field); } } } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SetProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SetProcessor.java index a78701645a9..ce328e34cb7 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SetProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SetProcessor.java @@ -20,9 +20,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.TemplateService; import org.elasticsearch.ingest.ValueSource; @@ -75,7 +75,7 @@ public final class SetProcessor extends AbstractProcessor { return TYPE; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { private final TemplateService templateService; @@ -84,7 +84,7 @@ public final class SetProcessor extends AbstractProcessor { } @Override - public SetProcessor doCreate(String processorTag, Map config) throws Exception { + public SetProcessor create(String processorTag, Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); Object value = ConfigurationUtils.readObject(TYPE, processorTag, config, "value"); boolean overrideEnabled = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "override", true); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SortProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SortProcessor.java index 706a1cef9c5..d5994d411c9 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SortProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SortProcessor.java @@ -20,9 +20,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import java.util.Collections; import java.util.List; @@ -111,10 +111,10 @@ public final class SortProcessor extends AbstractProcessor { return TYPE; } - public final static class Factory extends AbstractProcessorFactory { + public static 
final class Factory implements Processor.Factory { @Override - public SortProcessor doCreate(String processorTag, Map config) throws Exception { + public SortProcessor create(String processorTag, Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, FIELD); try { SortOrder direction = SortOrder.fromString( diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SplitProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SplitProcessor.java index f7c5e8befc4..87526f62297 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SplitProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SplitProcessor.java @@ -20,9 +20,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import java.util.ArrayList; import java.util.Collections; @@ -72,9 +72,9 @@ public final class SplitProcessor extends AbstractProcessor { return TYPE; } - public static class Factory extends AbstractProcessorFactory { + public static class Factory implements Processor.Factory { @Override - public SplitProcessor doCreate(String processorTag, Map config) throws Exception { + public SplitProcessor create(String processorTag, Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); return new SplitProcessor(processorTag, field, ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "separator")); } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/TrimProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/TrimProcessor.java index a57a25125d6..e852f887da0 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/TrimProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/TrimProcessor.java @@ -41,7 +41,7 @@ public final class TrimProcessor extends AbstractStringProcessor { return TYPE; } - public static final class Factory extends AbstractStringProcessor.Factory<TrimProcessor> { + public static final class Factory extends AbstractStringProcessor.Factory { public Factory() { super(TYPE); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/UppercaseProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/UppercaseProcessor.java index a5c817352a1..5585a130eaf 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/UppercaseProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/UppercaseProcessor.java @@ -43,7 +43,7 @@ public final class UppercaseProcessor extends AbstractStringProcessor { return TYPE; } - public static final class Factory extends AbstractStringProcessor.Factory<UppercaseProcessor> { + public static final class Factory extends AbstractStringProcessor.Factory { public Factory() { super(TYPE); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorFactoryTests.java index fbf77cc4285..b49a44cc04c 100644 ---
a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorFactoryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -52,8 +51,7 @@ public class AppendProcessorFactoryTests extends ESTestCase { } config.put("value", value); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - AppendProcessor appendProcessor = factory.create(config); + AppendProcessor appendProcessor = factory.create(processorTag, config); assertThat(appendProcessor.getTag(), equalTo(processorTag)); assertThat(appendProcessor.getField().execute(Collections.emptyMap()), equalTo("field1")); assertThat(appendProcessor.getValue().copyAndResolve(Collections.emptyMap()), equalTo(value)); @@ -63,7 +61,7 @@ public class AppendProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("value", "value1"); try { - factory.create(config); + factory.create(null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); @@ -74,7 +72,7 @@ public class AppendProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); try { - factory.create(config); + factory.create(null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[value] required property is missing")); @@ -86,7 +84,7 @@ public class AppendProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("value", null); try { - factory.create(config); + factory.create(null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[value] required property is missing")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorFactoryTests.java index 1ec5362af14..7dd8bbf97e2 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorFactoryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; @@ -39,8 +38,7 @@ public class ConvertProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("type", type.toString()); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - ConvertProcessor convertProcessor = factory.create(config); + ConvertProcessor convertProcessor = factory.create(processorTag, config); assertThat(convertProcessor.getTag(), equalTo(processorTag)); assertThat(convertProcessor.getField(), equalTo("field1")); assertThat(convertProcessor.getTargetField(), equalTo("field1")); @@ -54,7 +52,7 @@ public class 
ConvertProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("type", type); try { - factory.create(config); + factory.create(null, config); fail("factory create should have failed"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), Matchers.equalTo("[type] type [" + type + "] not supported, cannot convert field.")); @@ -70,7 +68,7 @@ public class ConvertProcessorFactoryTests extends ESTestCase { String type = "type-" + randomAsciiOfLengthBetween(1, 10); config.put("type", type); try { - factory.create(config); + factory.create(null, config); fail("factory create should have failed"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), Matchers.equalTo("[field] required property is missing")); @@ -82,7 +80,7 @@ public class ConvertProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); try { - factory.create(config); + factory.create(null, config); fail("factory create should have failed"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), Matchers.equalTo("[type] required property is missing")); @@ -97,8 +95,7 @@ public class ConvertProcessorFactoryTests extends ESTestCase { config.put("target_field", "field2"); config.put("type", type.toString()); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - ConvertProcessor convertProcessor = factory.create(config); + ConvertProcessor convertProcessor = factory.create(processorTag, config); assertThat(convertProcessor.getTag(), equalTo(processorTag)); assertThat(convertProcessor.getField(), equalTo("field1")); assertThat(convertProcessor.getTargetField(), equalTo("field2")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameFactoryTests.java index 42877236b88..cc272d0b120 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameFactoryTests.java @@ -36,7 +36,7 @@ public class DateIndexNameFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("date_rounding", "y"); - DateIndexNameProcessor processor = factory.create(config); + DateIndexNameProcessor processor = factory.create(null, config); assertThat(processor.getDateFormats().size(), Matchers.equalTo(1)); assertThat(processor.getField(), Matchers.equalTo("_field")); assertThat(processor.getIndexNamePrefix(), Matchers.equalTo("")); @@ -53,7 +53,7 @@ public class DateIndexNameFactoryTests extends ESTestCase { config.put("date_rounding", "y"); config.put("date_formats", Arrays.asList("UNIX", "UNIX_MS")); - DateIndexNameProcessor processor = factory.create(config); + DateIndexNameProcessor processor = factory.create(null, config); assertThat(processor.getDateFormats().size(), Matchers.equalTo(2)); config = new HashMap<>(); @@ -62,7 +62,7 @@ public class DateIndexNameFactoryTests extends ESTestCase { config.put("date_rounding", "y"); config.put("index_name_format", "yyyyMMdd"); - processor = factory.create(config); + processor = factory.create(null, config); assertThat(processor.getIndexNameFormat(), Matchers.equalTo("yyyyMMdd")); config = new HashMap<>(); @@ -71,7 +71,7 @@ public class DateIndexNameFactoryTests extends ESTestCase { config.put("date_rounding", "y"); config.put("timezone", "+02:00"); - 
processor = factory.create(config); + processor = factory.create(null, config); assertThat(processor.getTimezone(), Matchers.equalTo(DateTimeZone.forOffsetHours(2))); config = new HashMap<>(); @@ -79,7 +79,7 @@ public class DateIndexNameFactoryTests extends ESTestCase { config.put("index_name_prefix", "_prefix"); config.put("date_rounding", "y"); - processor = factory.create(config); + processor = factory.create(null, config); assertThat(processor.getIndexNamePrefix(), Matchers.equalTo("_prefix")); } @@ -87,12 +87,12 @@ public class DateIndexNameFactoryTests extends ESTestCase { DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory(); Map config = new HashMap<>(); config.put("date_rounding", "y"); - ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(config)); + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, config)); assertThat(e.getMessage(), Matchers.equalTo("[field] required property is missing")); config.clear(); config.put("field", "_field"); - e = expectThrows(ElasticsearchParseException.class, () -> factory.create(config)); + e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, config)); assertThat(e.getMessage(), Matchers.equalTo("[date_rounding] required property is missing")); } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorFactoryTests.java index 65dcdf6082c..9c5352c7ee4 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorFactoryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTimeZone; @@ -42,8 +41,7 @@ public class DateProcessorFactoryTests extends ESTestCase { config.put("field", sourceField); config.put("formats", Collections.singletonList("dd/MM/yyyyy")); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - DateProcessor processor = factory.create(config); + DateProcessor processor = factory.create(processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getField(), equalTo(sourceField)); assertThat(processor.getTargetField(), equalTo(DateProcessor.DEFAULT_TARGET_FIELD)); @@ -60,7 +58,7 @@ public class DateProcessorFactoryTests extends ESTestCase { config.put("formats", Collections.singletonList("dd/MM/yyyyy")); try { - factory.create(config); + factory.create(null, config); fail("processor creation should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), containsString("[field] required property is missing")); @@ -76,7 +74,7 @@ public class DateProcessorFactoryTests extends ESTestCase { config.put("target_field", targetField); try { - factory.create(config); + factory.create(null, config); fail("processor creation should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), containsString("[formats] required property is missing")); @@ -92,7 +90,7 @@ public class DateProcessorFactoryTests extends ESTestCase { Locale locale = randomLocale(random()); config.put("locale", 
locale.toLanguageTag()); - DateProcessor processor = factory.create(config); + DateProcessor processor = factory.create(null, config); assertThat(processor.getLocale().toLanguageTag(), equalTo(locale.toLanguageTag())); } @@ -104,7 +102,7 @@ public class DateProcessorFactoryTests extends ESTestCase { config.put("formats", Collections.singletonList("dd/MM/yyyyy")); config.put("locale", "invalid_locale"); try { - factory.create(config); + factory.create(null, config); fail("should fail with invalid locale"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("Invalid language tag specified: invalid_locale")); @@ -120,7 +118,7 @@ public class DateProcessorFactoryTests extends ESTestCase { DateTimeZone timezone = randomDateTimeZone(); config.put("timezone", timezone.getID()); - DateProcessor processor = factory.create(config); + DateProcessor processor = factory.create(null, config); assertThat(processor.getTimezone(), equalTo(timezone)); } @@ -132,7 +130,7 @@ public class DateProcessorFactoryTests extends ESTestCase { config.put("match_formats", Collections.singletonList("dd/MM/yyyyy")); config.put("timezone", "invalid_timezone"); try { - factory.create(config); + factory.create(null, config); fail("invalid timezone should fail"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("The datetime zone id 'invalid_timezone' is not recognised")); @@ -146,7 +144,7 @@ public class DateProcessorFactoryTests extends ESTestCase { config.put("field", sourceField); config.put("formats", Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy")); - DateProcessor processor = factory.create(config); + DateProcessor processor = factory.create(null, config); assertThat(processor.getFormats(), equalTo(Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy"))); } @@ -158,7 +156,7 @@ public class DateProcessorFactoryTests extends ESTestCase { config.put("formats", "dd/MM/yyyy"); try { - factory.create(config); + factory.create(null, config); fail("processor creation should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), containsString("[formats] property isn't a list, but of type [java.lang.String]")); @@ -174,7 +172,7 @@ public class DateProcessorFactoryTests extends ESTestCase { config.put("target_field", targetField); config.put("formats", Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy")); - DateProcessor processor = factory.create(config); + DateProcessor processor = factory.create(null, config); assertThat(processor.getTargetField(), equalTo(targetField)); } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorFactoryTests.java index db16b78b316..a385a58ef50 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorFactoryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -44,8 +43,7 @@ public class FailProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("message", "error"); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - FailProcessor 
failProcessor = factory.create(config); + FailProcessor failProcessor = factory.create(processorTag, config); assertThat(failProcessor.getTag(), equalTo(processorTag)); assertThat(failProcessor.getMessage().execute(Collections.emptyMap()), equalTo("error")); } @@ -53,7 +51,7 @@ public class FailProcessorFactoryTests extends ESTestCase { public void testCreateMissingMessageField() throws Exception { Map config = new HashMap<>(); try { - factory.create(config); + factory.create(null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[message] required property is missing")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorFactoryTests.java index d45e98ab06a..0bbad532fb3 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorFactoryTests.java @@ -38,14 +38,14 @@ public class ForEachProcessorFactoryTests extends ESTestCase { public void testCreate() throws Exception { ProcessorsRegistry.Builder builder = new ProcessorsRegistry.Builder(); Processor processor = new TestProcessor(ingestDocument -> {}); - builder.registerProcessor("_name", (registry) -> config -> processor); + builder.registerProcessor("_name", (registry) -> (tag, config) -> processor); ProcessorsRegistry registry = builder.build(mock(ScriptService.class), mock(ClusterService.class)); ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory(registry); Map config = new HashMap<>(); config.put("field", "_field"); config.put("processors", Collections.singletonList(Collections.singletonMap("_name", Collections.emptyMap()))); - ForEachProcessor forEachProcessor = forEachFactory.create(config); + ForEachProcessor forEachProcessor = forEachFactory.create(null, config); assertThat(forEachProcessor, Matchers.notNullValue()); assertThat(forEachProcessor.getField(), Matchers.equalTo("_field")); assertThat(forEachProcessor.getProcessors().size(), Matchers.equalTo(1)); @@ -54,7 +54,7 @@ public class ForEachProcessorFactoryTests extends ESTestCase { config = new HashMap<>(); config.put("processors", Collections.singletonList(Collections.singletonMap("_name", Collections.emptyMap()))); try { - forEachFactory.create(config); + forEachFactory.create(null, config); fail("exception expected"); } catch (Exception e) { assertThat(e.getMessage(), Matchers.equalTo("[field] required property is missing")); @@ -63,7 +63,7 @@ public class ForEachProcessorFactoryTests extends ESTestCase { config = new HashMap<>(); config.put("field", "_field"); try { - forEachFactory.create(config); + forEachFactory.create(null, config); fail("exception expected"); } catch (Exception e) { assertThat(e.getMessage(), Matchers.equalTo("[processors] required property is missing")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorFactoryTests.java index a7a133b4363..1287d066420 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorFactoryTests.java @@ -20,7 +20,6 @@ package 
org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import java.util.Collections; @@ -39,8 +38,7 @@ public class GrokProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("patterns", Collections.singletonList("(?\\w+)")); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - GrokProcessor processor = factory.create(config); + GrokProcessor processor = factory.create(processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getMatchField(), equalTo("_field")); assertThat(processor.getGrok(), notNullValue()); @@ -50,7 +48,7 @@ public class GrokProcessorFactoryTests extends ESTestCase { GrokProcessor.Factory factory = new GrokProcessor.Factory(Collections.emptyMap()); Map config = new HashMap<>(); config.put("patterns", Collections.singletonList("(?\\w+)")); - ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(config)); + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create("tag", config)); assertThat(e.getMessage(), equalTo("[field] required property is missing")); } @@ -58,7 +56,7 @@ public class GrokProcessorFactoryTests extends ESTestCase { GrokProcessor.Factory factory = new GrokProcessor.Factory(Collections.emptyMap()); Map config = new HashMap<>(); config.put("field", "foo"); - ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(config)); + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create("tag", config)); assertThat(e.getMessage(), equalTo("[patterns] required property is missing")); } @@ -67,7 +65,7 @@ public class GrokProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "foo"); config.put("patterns", Collections.emptyList()); - ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(config)); + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create("tag", config)); assertThat(e.getMessage(), equalTo("[patterns] List of patterns must not be empty")); } @@ -78,7 +76,7 @@ public class GrokProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("patterns", Collections.singletonList("%{MY_PATTERN:name}!")); config.put("pattern_definitions", Collections.singletonMap("MY_PATTERN", "foo")); - GrokProcessor processor = factory.create(config); + GrokProcessor processor = factory.create(null, config); assertThat(processor.getMatchField(), equalTo("_field")); assertThat(processor.getGrok(), notNullValue()); assertThat(processor.getGrok().match("foo!"), equalTo(true)); @@ -89,7 +87,7 @@ public class GrokProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "_field"); config.put("patterns", Collections.singletonList("[")); - ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(config)); + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create("tag", config)); assertThat(e.getMessage(), equalTo("[patterns] Invalid regex pattern found in: [[]. 
premature end of char-class")); } @@ -99,7 +97,7 @@ public class GrokProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("patterns", Collections.singletonList("%{MY_PATTERN:name}!")); config.put("pattern_definitions", Collections.singletonMap("MY_PATTERN", "[")); - ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(config)); + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create("tag", config)); assertThat(e.getMessage(), equalTo("[patterns] Invalid regex pattern found in: [%{MY_PATTERN:name}!]. premature end of char-class")); } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorFactoryTests.java index 60cceb34024..0086457f857 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorFactoryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -38,8 +37,7 @@ public class GsubProcessorFactoryTests extends ESTestCase { config.put("pattern", "\\."); config.put("replacement", "-"); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - GsubProcessor gsubProcessor = factory.create(config); + GsubProcessor gsubProcessor = factory.create(processorTag, config); assertThat(gsubProcessor.getTag(), equalTo(processorTag)); assertThat(gsubProcessor.getField(), equalTo("field1")); assertThat(gsubProcessor.getPattern().toString(), equalTo("\\.")); @@ -52,7 +50,7 @@ public class GsubProcessorFactoryTests extends ESTestCase { config.put("pattern", "\\."); config.put("replacement", "-"); try { - factory.create(config); + factory.create(null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); @@ -65,7 +63,7 @@ public class GsubProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("replacement", "-"); try { - factory.create(config); + factory.create(null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[pattern] required property is missing")); @@ -78,7 +76,7 @@ public class GsubProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("pattern", "\\."); try { - factory.create(config); + factory.create(null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[replacement] required property is missing")); @@ -92,7 +90,7 @@ public class GsubProcessorFactoryTests extends ESTestCase { config.put("pattern", "["); config.put("replacement", "-"); try { - factory.create(config); + factory.create(null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), containsString("[pattern] Invalid regex pattern. 
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JoinProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JoinProcessorFactoryTests.java
index 970fd8b8b9a..b2386c17a2b 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JoinProcessorFactoryTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JoinProcessorFactoryTests.java
@@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.ingest.AbstractProcessorFactory;
import org.elasticsearch.test.ESTestCase;

import java.util.HashMap;
@@ -36,8 +35,7 @@ public class JoinProcessorFactoryTests extends ESTestCase {
        config.put("field", "field1");
        config.put("separator", "-");
        String processorTag = randomAsciiOfLength(10);
-        config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
-        JoinProcessor joinProcessor = factory.create(config);
+        JoinProcessor joinProcessor = factory.create(processorTag, config);
        assertThat(joinProcessor.getTag(), equalTo(processorTag));
        assertThat(joinProcessor.getField(), equalTo("field1"));
        assertThat(joinProcessor.getSeparator(), equalTo("-"));
@@ -48,7 +46,7 @@ public class JoinProcessorFactoryTests extends ESTestCase {
        Map<String, Object> config = new HashMap<>();
        config.put("separator", "-");
        try {
-            factory.create(config);
+            factory.create(null, config);
            fail("factory create should have failed");
        } catch (ElasticsearchParseException e) {
            assertThat(e.getMessage(), equalTo("[field] required property is missing"));
@@ -60,7 +58,7 @@ public class JoinProcessorFactoryTests extends ESTestCase {
        Map<String, Object> config = new HashMap<>();
        config.put("field", "field1");
        try {
-            factory.create(config);
+            factory.create(null, config);
            fail("factory create should have failed");
        } catch (ElasticsearchParseException e) {
            assertThat(e.getMessage(), equalTo("[separator] required property is missing"));
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/LowercaseProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/LowercaseProcessorFactoryTests.java
index 4dec115458c..2e0682beb6e 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/LowercaseProcessorFactoryTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/LowercaseProcessorFactoryTests.java
@@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.ingest.AbstractProcessorFactory;
import org.elasticsearch.test.ESTestCase;

import java.util.HashMap;
@@ -35,8 +34,7 @@ public class LowercaseProcessorFactoryTests extends ESTestCase {
        Map<String, Object> config = new HashMap<>();
        config.put("field", "field1");
        String processorTag = randomAsciiOfLength(10);
-        config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
-        LowercaseProcessor uppercaseProcessor = factory.create(config);
+        LowercaseProcessor uppercaseProcessor = (LowercaseProcessor)factory.create(processorTag, config);
        assertThat(uppercaseProcessor.getTag(), equalTo(processorTag));
        assertThat(uppercaseProcessor.getField(), equalTo("field1"));
    }
@@ -45,7 +43,7 @@ public class LowercaseProcessorFactoryTests extends ESTestCase {
        LowercaseProcessor.Factory factory = new LowercaseProcessor.Factory();
        Map<String, Object> config = new HashMap<>();
        try {
-            factory.create(config);
+            factory.create(null, config);
            fail("factory create should have failed");
have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorFactoryTests.java index a5f88103e96..133ddbeebfa 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorFactoryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -44,8 +43,7 @@ public class RemoveProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - RemoveProcessor removeProcessor = factory.create(config); + RemoveProcessor removeProcessor = factory.create(processorTag, config); assertThat(removeProcessor.getTag(), equalTo(processorTag)); assertThat(removeProcessor.getField().execute(Collections.emptyMap()), equalTo("field1")); } @@ -53,7 +51,7 @@ public class RemoveProcessorFactoryTests extends ESTestCase { public void testCreateMissingField() throws Exception { Map config = new HashMap<>(); try { - factory.create(config); + factory.create(null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java index c078f09dd92..b969a3b6247 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -36,8 +35,7 @@ public class RenameProcessorFactoryTests extends ESTestCase { config.put("field", "old_field"); config.put("target_field", "new_field"); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - RenameProcessor renameProcessor = factory.create(config); + RenameProcessor renameProcessor = factory.create(processorTag, config); assertThat(renameProcessor.getTag(), equalTo(processorTag)); assertThat(renameProcessor.getField(), equalTo("old_field")); assertThat(renameProcessor.getTargetField(), equalTo("new_field")); @@ -48,7 +46,7 @@ public class RenameProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("target_field", "new_field"); try { - factory.create(config); + factory.create(null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); @@ -60,7 +58,7 @@ public class 
        Map<String, Object> config = new HashMap<>();
        config.put("field", "old_field");
        try {
-            factory.create(config);
+            factory.create(null, config);
            fail("factory create should have failed");
        } catch(ElasticsearchParseException e) {
            assertThat(e.getMessage(), equalTo("[target_field] required property is missing"));
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java
index 59a5d7ceaa2..79048cd7769 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java
@@ -37,7 +37,7 @@ public class ScriptProcessorFactoryTests extends ESTestCase {
    @Before
    public void init() {
-        factory = new ScriptProcessor.Factory(mock(ScriptService.class), mock(ClusterService.class));
+        factory = new ScriptProcessor.Factory(mock(ScriptService.class));
    }
@@ -55,7 +55,7 @@ public class ScriptProcessorFactoryTests extends ESTestCase {
        configMap.put("lang", "mockscript");

        ElasticsearchException exception = expectThrows(ElasticsearchException.class,
-                () -> factory.doCreate(randomAsciiOfLength(10), configMap));
+                () -> factory.create(randomAsciiOfLength(10), configMap));

        assertThat(exception.getMessage(), is("[null] Only one of [file], [id], or [inline] may be configured"));
    }
@@ -66,7 +66,7 @@ public class ScriptProcessorFactoryTests extends ESTestCase {
        configMap.put("lang", "mockscript");

        ElasticsearchException exception = expectThrows(ElasticsearchException.class,
-                () -> factory.doCreate(randomAsciiOfLength(10), configMap));
+                () -> factory.create(randomAsciiOfLength(10), configMap));

        assertThat(exception.getMessage(), is("[null] Need [file], [id], or [inline] parameter to refer to scripts"));
    }
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java
index 6da127a798b..5fe7db77dd8 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java
@@ -19,7 +19,9 @@ package org.elasticsearch.ingest.common;

-import org.elasticsearch.cluster.service.ClusterService;
+import java.util.HashMap;
+import java.util.Map;
+
import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.RandomDocumentPicks;
import org.elasticsearch.script.CompiledScript;
@@ -28,9 +30,6 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.test.ESTestCase;

-import java.util.HashMap;
-import java.util.Map;
-
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.core.Is.is;
import static org.mockito.Mockito.any;
@@ -42,16 +41,15 @@ public class ScriptProcessorTests extends ESTestCase {

    public void testScripting() throws Exception {
        int randomInt = randomInt();
        ScriptService scriptService = mock(ScriptService.class);
-        ClusterService clusterService = mock(ClusterService.class);
        CompiledScript compiledScript = mock(CompiledScript.class);
        Script script = mock(Script.class);
-        when(scriptService.compile(any(), any(), any(), any())).thenReturn(compiledScript);
+        when(scriptService.compile(any(), any(), any())).thenReturn(compiledScript);
        ExecutableScript executableScript = mock(ExecutableScript.class);
        when(scriptService.executable(any(), any())).thenReturn(executableScript);
        when(executableScript.run()).thenReturn(randomInt);

        ScriptProcessor processor = new ScriptProcessor(randomAsciiOfLength(10), script,
-                scriptService, clusterService, "bytes_total");
+                scriptService, "bytes_total");

        Map<String, Object> document = new HashMap<>();
        document.put("bytes_in", 1234);
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java
index b8c97a379cb..cbbf3f40902 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java
@@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.ingest.AbstractProcessorFactory;
import org.elasticsearch.ingest.TestTemplateService;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
@@ -45,8 +44,7 @@ public class SetProcessorFactoryTests extends ESTestCase {
        config.put("field", "field1");
        config.put("value", "value1");
        String processorTag = randomAsciiOfLength(10);
-        config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
-        SetProcessor setProcessor = factory.create(config);
+        SetProcessor setProcessor = factory.create(processorTag, config);
        assertThat(setProcessor.getTag(), equalTo(processorTag));
        assertThat(setProcessor.getField().execute(Collections.emptyMap()), equalTo("field1"));
        assertThat(setProcessor.getValue().copyAndResolve(Collections.emptyMap()), equalTo("value1"));
@@ -60,8 +58,7 @@ public class SetProcessorFactoryTests extends ESTestCase {
        config.put("value", "value1");
        config.put("override", overrideEnabled);
        String processorTag = randomAsciiOfLength(10);
-        config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
-        SetProcessor setProcessor = factory.create(config);
+        SetProcessor setProcessor = factory.create(processorTag, config);
        assertThat(setProcessor.getTag(), equalTo(processorTag));
        assertThat(setProcessor.getField().execute(Collections.emptyMap()), equalTo("field1"));
        assertThat(setProcessor.getValue().copyAndResolve(Collections.emptyMap()), equalTo("value1"));
@@ -72,7 +69,7 @@ public class SetProcessorFactoryTests extends ESTestCase {
        Map<String, Object> config = new HashMap<>();
        config.put("value", "value1");
        try {
-            factory.create(config);
+            factory.create(null, config);
            fail("factory create should have failed");
        } catch(ElasticsearchParseException e) {
            assertThat(e.getMessage(), equalTo("[field] required property is missing"));
@@ -83,7 +80,7 @@ public class SetProcessorFactoryTests extends ESTestCase {
        Map<String, Object> config = new HashMap<>();
        config.put("field", "field1");
        try {
-            factory.create(config);
+            factory.create(null, config);
            fail("factory create should have failed");
        } catch(ElasticsearchParseException e) {
            assertThat(e.getMessage(), equalTo("[value] required property is missing"));
@@ -95,7 +92,7 @@ public class SetProcessorFactoryTests extends ESTestCase {
        config.put("field", "field1");
        config.put("value", null);
        try {
-            factory.create(config);
+            factory.create(null, config);
            fail("factory create should have failed");
        } catch(ElasticsearchParseException e) {
            assertThat(e.getMessage(), equalTo("[value] required property is missing"));
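Every ingest-common test hunk above tracks the same mechanical API change: Processor.Factory.create now receives the processor tag as an explicit first argument instead of fishing it out of the config map under AbstractProcessorFactory.TAG_KEY. A minimal sketch of the before/after shape of such a factory — SampleProcessor and its option handling are hypothetical stand-ins, not the actual Elasticsearch interface:

    import java.util.HashMap;
    import java.util.Map;

    final class SampleProcessor {
        final String tag;
        final String field;

        SampleProcessor(String tag, String field) {
            this.tag = tag;
            this.field = field;
        }

        static final class Factory {
            // Old shape: create(Map<String, Object> config) pulled the tag out of the
            // config map itself, mixing pipeline metadata with processor options.
            // New shape: the caller passes the tag explicitly; config holds only options.
            SampleProcessor create(String tag, Map<String, Object> config) {
                String field = (String) config.remove("field");
                if (field == null) {
                    throw new IllegalArgumentException("[field] required property is missing");
                }
                return new SampleProcessor(tag, field);
            }
        }

        public static void main(String[] args) {
            Map<String, Object> config = new HashMap<>();
            config.put("field", "field1");
            SampleProcessor p = new Factory().create("my-tag", config);
            System.out.println(p.tag + " -> " + p.field); // my-tag -> field1
        }
    }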
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorFactoryTests.java
index c747807b710..4f85d61e629 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorFactoryTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorFactoryTests.java
@@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.ingest.AbstractProcessorFactory;
import org.elasticsearch.test.ESTestCase;

import java.util.HashMap;
@@ -36,8 +35,7 @@ public class SplitProcessorFactoryTests extends ESTestCase {
        config.put("field", "field1");
        config.put("separator", "\\.");
        String processorTag = randomAsciiOfLength(10);
-        config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
-        SplitProcessor splitProcessor = factory.create(config);
+        SplitProcessor splitProcessor = factory.create(processorTag, config);
        assertThat(splitProcessor.getTag(), equalTo(processorTag));
        assertThat(splitProcessor.getField(), equalTo("field1"));
        assertThat(splitProcessor.getSeparator(), equalTo("\\."));
@@ -48,7 +46,7 @@ public class SplitProcessorFactoryTests extends ESTestCase {
        Map<String, Object> config = new HashMap<>();
        config.put("separator", "\\.");
        try {
-            factory.create(config);
+            factory.create(null, config);
            fail("factory create should have failed");
        } catch(ElasticsearchParseException e) {
            assertThat(e.getMessage(), equalTo("[field] required property is missing"));
@@ -60,7 +58,7 @@ public class SplitProcessorFactoryTests extends ESTestCase {
        Map<String, Object> config = new HashMap<>();
        config.put("field", "field1");
        try {
-            factory.create(config);
+            factory.create(null, config);
            fail("factory create should have failed");
        } catch(ElasticsearchParseException e) {
            assertThat(e.getMessage(), equalTo("[separator] required property is missing"));
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorTests.java
index 13d45dc126b..a7e1313a099 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorTests.java
@@ -84,7 +84,7 @@ public class SplitProcessorTests extends ESTestCase {
        Map<String, Object> splitConfig = new HashMap<>();
        splitConfig.put("field", "flags");
        splitConfig.put("separator", "\\|");
-        Processor splitProcessor = (new SplitProcessor.Factory()).create(splitConfig);
+        Processor splitProcessor = (new SplitProcessor.Factory()).create("tag", splitConfig);
        Map<String, Object> source = new HashMap<>();
        source.put("flags", "new|hot|super|fun|interesting");
        IngestDocument ingestDocument = new IngestDocument(source, new HashMap<>());
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TrimProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TrimProcessorFactoryTests.java
index 54904775478..fa9a33f41de 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TrimProcessorFactoryTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TrimProcessorFactoryTests.java
@@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.ingest.AbstractProcessorFactory;
import org.elasticsearch.test.ESTestCase;

import java.util.HashMap;
@@ -35,8 +34,7 @@ public class TrimProcessorFactoryTests extends ESTestCase {
        Map<String, Object> config = new HashMap<>();
        config.put("field", "field1");
        String processorTag = randomAsciiOfLength(10);
-        config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
-        TrimProcessor uppercaseProcessor = factory.create(config);
+        TrimProcessor uppercaseProcessor = (TrimProcessor)factory.create(processorTag, config);
        assertThat(uppercaseProcessor.getTag(), equalTo(processorTag));
        assertThat(uppercaseProcessor.getField(), equalTo("field1"));
    }
@@ -45,7 +43,7 @@ public class TrimProcessorFactoryTests extends ESTestCase {
        TrimProcessor.Factory factory = new TrimProcessor.Factory();
        Map<String, Object> config = new HashMap<>();
        try {
-            factory.create(config);
+            factory.create(null, config);
            fail("factory create should have failed");
        } catch(ElasticsearchParseException e) {
            assertThat(e.getMessage(), equalTo("[field] required property is missing"));
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UppercaseProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UppercaseProcessorFactoryTests.java
index cd4d1faf767..91698d4fcc4 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UppercaseProcessorFactoryTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UppercaseProcessorFactoryTests.java
@@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.ingest.AbstractProcessorFactory;
import org.elasticsearch.test.ESTestCase;

import java.util.HashMap;
@@ -35,8 +34,7 @@ public class UppercaseProcessorFactoryTests extends ESTestCase {
        Map<String, Object> config = new HashMap<>();
        config.put("field", "field1");
        String processorTag = randomAsciiOfLength(10);
-        config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
-        UppercaseProcessor uppercaseProcessor = factory.create(config);
+        UppercaseProcessor uppercaseProcessor = (UppercaseProcessor)factory.create(processorTag, config);
        assertThat(uppercaseProcessor.getTag(), equalTo(processorTag));
        assertThat(uppercaseProcessor.getField(), equalTo("field1"));
    }
@@ -45,7 +43,7 @@ public class UppercaseProcessorFactoryTests extends ESTestCase {
        UppercaseProcessor.Factory factory = new UppercaseProcessor.Factory();
        Map<String, Object> config = new HashMap<>();
        try {
-            factory.create(config);
+            factory.create(null, config);
            fail("factory create should have failed");
        } catch(ElasticsearchParseException e) {
            assertThat(e.getMessage(), equalTo("[field] required property is missing"));
diff --git a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java
index 105d42c8c86..a83dd93a17e 100644
--- a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java
+++ b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java
@@ -155,7 +155,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri
            GroovyClassLoader groovyClassLoader = new GroovyClassLoader(loader, configuration);
            return groovyClassLoader.parseClass(codeSource);
-        } catch (Throwable e) {
+        } catch (Exception e) {
            if (logger.isTraceEnabled()) {
                logger.trace("Exception compiling Groovy script:", e);
            }
@@ -293,7 +293,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri
            // NOTE: we truncate the stack because IndyInterface has security issue (needs getClassLoader)
            // we don't do a security check just as a tradeoff, it cannot really escalate to anything.
            return AccessController.doPrivileged((PrivilegedAction<Object>) script::run);
-        } catch (Throwable e) {
+        } catch (Exception e) {
            if (logger.isTraceEnabled()) {
                logger.trace("failed to run {}", e, compiledScript);
            }
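The two hunks above (and several below) narrow catch (Throwable) to catch (Exception), so that genuine VM errors such as OutOfMemoryError or StackOverflowError propagate and fail loudly instead of being logged and swallowed. A small self-contained sketch of the behavioural difference; the compile() helper here is invented for illustration:

    public final class CatchNarrowing {
        // Stand-in for a script-compilation step that can fail in recoverable ways.
        static Object compile(String source) {
            if (source.isEmpty()) {
                throw new IllegalArgumentException("empty script");
            }
            return source;
        }

        public static void main(String[] args) {
            try {
                compile("");
            } catch (Exception e) { // was: catch (Throwable e)
                // IllegalArgumentException still lands here and is handled, but an
                // Error would now escape this block and crash the JVM, by design.
                System.err.println("script compilation failed: " + e.getMessage());
            }
        }
    }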
diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java
index 5ada804954a..b4260cf5530 100644
--- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java
+++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch.messy.tests;
import com.carrotsearch.hppc.LongHashSet;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.groovy.GroovyPlugin;
@@ -825,6 +826,83 @@ public class HistogramTests extends ESIntegTestCase {
        }
    }
+    public void testEmptyWithExtendedBounds() throws Exception {
+        int lastDataBucketKey = (numValueBuckets - 1) * interval;
+
+        // randomizing the number of buckets on the min bound
+        // (can sometimes fall within the data range, but more frequently will fall before the data range)
+        int addedBucketsLeft = randomIntBetween(0, numValueBuckets);
+        long boundsMinKey = addedBucketsLeft * interval;
+        if (frequently()) {
+            boundsMinKey = -boundsMinKey;
+        } else {
+            addedBucketsLeft = 0;
+        }
+        long boundsMin = boundsMinKey + randomIntBetween(0, interval - 1);
+
+        // randomizing the number of buckets on the max bound
+        // (can sometimes fall within the data range, but more frequently will fall after the data range)
+        int addedBucketsRight = randomIntBetween(0, numValueBuckets);
+        long boundsMaxKeyDelta = addedBucketsRight * interval;
+        if (rarely()) {
+            addedBucketsRight = 0;
+            boundsMaxKeyDelta = -boundsMaxKeyDelta;
+        }
+        long boundsMaxKey = lastDataBucketKey + boundsMaxKeyDelta;
+        long boundsMax = boundsMaxKey + randomIntBetween(0, interval - 1);
+
+
+        // it could be that the random bounds.min we chose ended up greater than bounds.max - this should cause an
+        // error
+        boolean invalidBoundsError = boundsMin > boundsMax;
+
+        // constructing the newly expected bucket list
+        int bucketsCount = (int) ((boundsMaxKey - boundsMinKey) / interval) + 1;
+        long[] extendedValueCounts = new long[valueCounts.length + addedBucketsLeft + addedBucketsRight];
+        System.arraycopy(valueCounts, 0, extendedValueCounts, addedBucketsLeft, valueCounts.length);
+
+        SearchResponse response = null;
+        try {
+            response = client().prepareSearch("idx")
+                    .setQuery(QueryBuilders.termQuery("foo", "bar"))
+                    .addAggregation(histogram("histo")
+                            .field(SINGLE_VALUED_FIELD_NAME)
+                            .interval(interval)
+                            .minDocCount(0)
+                            .extendedBounds(new ExtendedBounds(boundsMin, boundsMax)))
+                    .execute().actionGet();
+
+            if (invalidBoundsError) {
+                fail("Expected an exception to be thrown when bounds.min is greater than bounds.max");
+                return;
+            }
+
+        } catch (Exception e) {
+            if (invalidBoundsError) {
+                // expected
+                return;
+            } else {
+                throw e;
+            }
+        }
+        assertSearchResponse(response);
+
+        Histogram histo = response.getAggregations().get("histo");
+        assertThat(histo, notNullValue());
+        assertThat(histo.getName(), equalTo("histo"));
+        List<? extends Histogram.Bucket> buckets = histo.getBuckets();
+        assertThat(buckets.size(), equalTo(bucketsCount));
+
+        long key = boundsMinKey;
+        for (int i = 0; i < bucketsCount; i++) {
+            Histogram.Bucket bucket = buckets.get(i);
+            assertThat(bucket, notNullValue());
+            assertThat(((Number) bucket.getKey()).longValue(), equalTo(key));
+            assertThat(bucket.getDocCount(), equalTo(0L));
+            key += interval;
+        }
+    }
+
    /**
     * see issue #9634, negative interval in histogram should raise exception
     */
diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java
index 662d4d2f30c..640c00b291d 100644
--- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java
+++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java
@@ -22,7 +22,6 @@ package org.elasticsearch.messy.tests;
import com.carrotsearch.hppc.LongHashSet;
import com.carrotsearch.hppc.LongSet;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
-
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
@@ -316,7 +315,8 @@ public class MinDocCountTests extends AbstractTermsTestCase {
            Thread.sleep(60000);
            logger.debug("1m passed. retrying.");
            testMinDocCountOnTerms(field, script, order, include, false);
-        } catch (Throwable secondFailure) {
+        } catch (Exception secondFailure) {
+            secondFailure.addSuppressed(ae);
            logger.error("exception on retry (will re-throw the original in a sec)", secondFailure);
        }
        throw ae;
diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java
index 6a8be584085..b7be9693210 100644
--- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java
+++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java
@@ -465,7 +465,7 @@ public class SearchFieldsTests extends ESIntegTestCase {
        String dateTime = Joda.forPattern("dateOptionalTime").printer().print(new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC));
        assertThat(searchResponse.getHits().getAt(0).fields().get("date_field").value(), equalTo((Object) dateTime));
        assertThat(searchResponse.getHits().getAt(0).fields().get("boolean_field").value(), equalTo((Object) Boolean.TRUE));
-        assertThat(((BytesReference) searchResponse.getHits().getAt(0).fields().get("binary_field").value()).toBytesArray(), equalTo((BytesReference) new BytesArray("testing text".getBytes("UTF8"))));
+        assertThat(((BytesReference) searchResponse.getHits().getAt(0).fields().get("binary_field").value()), equalTo((BytesReference) new BytesArray("testing text".getBytes("UTF8"))));
    }
diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyScriptTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyScriptTests.java
index 4a7b4350d23..f2eee2bb408 100644
--- a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyScriptTests.java
+++ b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyScriptTests.java
@@ -99,15 +99,15 @@ public class GroovyScriptTests extends ESIntegTestCase {

        try {
            client().prepareSearch("test")
-                    .setQuery(constantScoreQuery(scriptQuery(new Script("assert false", ScriptType.INLINE, "groovy", null)))).get();
+                    .setQuery(constantScoreQuery(scriptQuery(new Script("null.foo", ScriptType.INLINE, "groovy", null)))).get();
            fail("should have thrown an exception");
        } catch (SearchPhaseExecutionException e) {
            assertThat(e.toString() + "should not contained NotSerializableTransportException",
                    e.toString().contains("NotSerializableTransportException"), equalTo(false));
            assertThat(e.toString() + "should have contained ScriptException",
                    e.toString().contains("ScriptException"), equalTo(true));
-            assertThat(e.toString()+ "should have contained an assert error",
-                    e.toString().contains("AssertionError[assert false"), equalTo(true));
+            assertThat(e.toString()+ "should have contained a NullPointerException",
+                    e.toString().contains("NullPointerException[Cannot get property 'foo' on null object]"), equalTo(true));
        }
    }
diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/MultiSearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/MultiSearchTemplateResponse.java
index c779757f61b..f9e99ffc7e3 100644
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/MultiSearchTemplateResponse.java
+++ b/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/MultiSearchTemplateResponse.java
@@ -40,21 +40,21 @@ public class MultiSearchTemplateResponse extends ActionResponse implements Itera
     */
    public static class Item implements Streamable {
        private SearchTemplateResponse response;
-        private Throwable throwable;
+        private Exception exception;

        Item() {
        }

-        public Item(SearchTemplateResponse response, Throwable throwable) {
+        public Item(SearchTemplateResponse response, Exception exception) {
            this.response = response;
-            this.throwable = throwable;
+            this.exception = exception;
        }

        /**
         * Is it a failed search?
         */
        public boolean isFailure() {
-            return throwable != null;
+            return exception != null;
        }

        /**
@@ -62,7 +62,7 @@ public class MultiSearchTemplateResponse extends ActionResponse implements Itera
         */
        @Nullable
        public String getFailureMessage() {
-            return throwable == null ? null : throwable.getMessage();
+            return exception == null ? null : exception.getMessage();
        }

        /**
@@ -85,7 +85,7 @@ public class MultiSearchTemplateResponse extends ActionResponse implements Itera
                this.response = new SearchTemplateResponse();
                response.readFrom(in);
            } else {
-                throwable = in.readThrowable();
+                exception = in.readException();
            }
        }

@@ -96,12 +96,12 @@ public class MultiSearchTemplateResponse extends ActionResponse implements Itera
                response.writeTo(out);
            } else {
                out.writeBoolean(false);
-                out.writeThrowable(throwable);
+                out.writeException(exception);
            }
        }

-        public Throwable getFailure() {
-            return throwable;
+        public Exception getFailure() {
+            return exception;
        }
    }

@@ -150,7 +150,7 @@ public class MultiSearchTemplateResponse extends ActionResponse implements Itera
        for (Item item : items) {
            builder.startObject();
            if (item.isFailure()) {
-                ElasticsearchException.renderThrowable(builder, params, item.getFailure());
+                ElasticsearchException.renderException(builder, params, item.getFailure());
            } else {
                item.getResponse().toXContent(builder, params);
            }
diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/TransportMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/TransportMultiSearchTemplateAction.java
index 1ffb19b5fc4..642fe7648da 100644
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/TransportMultiSearchTemplateAction.java
+++ b/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/TransportMultiSearchTemplateAction.java
@@ -61,7 +61,7 @@ public class TransportMultiSearchTemplateAction extends HandledTransportAction<MultiSearchTemplateRequest, MultiSearchTemplateResponse>
diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java
new file mode 100644
--- /dev/null
+++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java
+public class CustomMustacheFactory extends DefaultMustacheFactory {
+
+    private final BiConsumer<String, Writer> encoder;
+
+    public CustomMustacheFactory(boolean escaping) {
+        super();
+        setObjectHandler(new CustomReflectionObjectHandler());
+        if (escaping) {
+            this.encoder = new JsonEscapeEncoder();
+        } else {
+            this.encoder = new NoEscapeEncoder();
+        }
+    }
+
+    @Override
+    public void encode(String value, Writer writer) {
+        encoder.accept(value, writer);
+    }
+
+    @Override
+    public MustacheVisitor createMustacheVisitor() {
+        return new CustomMustacheVisitor(this);
+    }
+
+    class CustomMustacheVisitor extends DefaultMustacheVisitor {
+
+        public CustomMustacheVisitor(DefaultMustacheFactory df) {
+            super(df);
+        }
+
+        @Override
+        public void iterable(TemplateContext templateContext, String variable, Mustache mustache) {
+            if (ToJsonCode.match(variable)) {
+                list.add(new ToJsonCode(templateContext, df, mustache, variable));
+            } else if (JoinerCode.match(variable)) {
+                list.add(new JoinerCode(templateContext, df, mustache));
+            } else if (CustomJoinerCode.match(variable)) {
+                list.add(new CustomJoinerCode(templateContext, df, mustache, variable));
+            } else {
+                list.add(new IterableCode(templateContext, df, mustache, variable));
+            }
+        }
+    }
+
+    /**
+     * Base class for custom Mustache functions
+     */
+    abstract static class CustomCode extends IterableCode {
+
+        private final String code;
+
+        public CustomCode(TemplateContext tc, DefaultMustacheFactory df, Mustache mustache, String code) {
+            super(tc, df, mustache, extractVariableName(code, mustache, tc));
+            this.code = Objects.requireNonNull(code);
+        }
+
+        @Override
+        public Writer execute(Writer writer, final List<Object> scopes) {
+            Object resolved = get(scopes);
+            writer = handle(writer, createFunction(resolved), scopes);
+            appendText(writer);
+            return writer;
+        }
+
+        @Override
+        protected void tag(Writer writer, String tag) throws IOException {
+            writer.write(tc.startChars());
+            writer.write(tag);
+            writer.write(code);
+            writer.write(tc.endChars());
+        }
+
+        protected abstract Function<String, String> createFunction(Object resolved);
+
+        /**
+         * At compile time, this function extracts the name of the variable:
+         * {{#toJson}}variable_name{{/toJson}}
+         */
+        protected static String extractVariableName(String fn, Mustache mustache, TemplateContext tc) {
+            Code[] codes = mustache.getCodes();
+            if (codes == null || codes.length != 1) {
+                throw new MustacheException("Mustache function [" + fn + "] must contain one and only one identifier");
+            }
+
+            try (StringWriter capture = new StringWriter()) {
+                // Variable name is in plain text and has type WriteCode
+                if (codes[0] instanceof WriteCode) {
+                    codes[0].execute(capture, Collections.emptyList());
+                    return capture.toString();
+                } else {
+                    codes[0].identity(capture);
+                    return capture.toString();
+                }
+            } catch (IOException e) {
+                throw new MustacheException("Exception while parsing mustache function [" + fn + "] at line " + tc.line(), e);
+            }
+        }
+    }
+
+    /**
+     * This function renders {@link Iterable} and {@link Map} as their JSON representation
+     */
+    static class ToJsonCode extends CustomCode {
+
+        private static final String CODE = "toJson";
+
+        public ToJsonCode(TemplateContext tc, DefaultMustacheFactory df, Mustache mustache, String variable) {
+            super(tc, df, mustache, CODE);
+            if (CODE.equalsIgnoreCase(variable) == false) {
+                throw new MustacheException("Mismatch function code [" + CODE + "] cannot be applied to [" + variable + "]");
+            }
+        }
+
+        @Override
+        @SuppressWarnings("unchecked")
+        protected Function<String, String> createFunction(Object resolved) {
+            return s -> {
+                if (resolved == null) {
+                    return null;
+                }
+                try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) {
+                    if (resolved == null) {
+                        builder.nullValue();
+                    } else if (resolved instanceof Iterable) {
+                        builder.startArray();
+                        for (Object o : (Iterable) resolved) {
+                            builder.value(o);
+                        }
+                        builder.endArray();
+                    } else if (resolved instanceof Map) {
+                        builder.map((Map) resolved);
+                    } else {
+                        // Do not handle as JSON
+                        return oh.stringify(resolved);
+                    }
+                    return builder.string();
+                } catch (IOException e) {
+                    throw new MustacheException("Failed to convert object to JSON", e);
+                }
+            };
+        }
+
+        static boolean match(String variable) {
+            return CODE.equalsIgnoreCase(variable);
+        }
+    }
+
+    /**
+     * This function concatenates the values of an {@link Iterable} using a given delimiter
+     */
+    static class JoinerCode extends CustomCode {
+
+        protected static final String CODE = "join";
+        private static final String DEFAULT_DELIMITER = ",";
+
+        private final String delimiter;
+
+        public JoinerCode(TemplateContext tc, DefaultMustacheFactory df, Mustache mustache, String delimiter) {
+            super(tc, df, mustache, CODE);
+            this.delimiter = delimiter;
+        }
+
+        public JoinerCode(TemplateContext tc, DefaultMustacheFactory df, Mustache mustache) {
+            this(tc, df, mustache, DEFAULT_DELIMITER);
+        }
+
+        @Override
+        protected Function<String, String> createFunction(Object resolved) {
+            return s -> {
+                if (s == null) {
+                    return null;
+                } else if (resolved instanceof Iterable) {
+                    StringJoiner joiner = new StringJoiner(delimiter);
+                    for (Object o : (Iterable) resolved) {
+                        joiner.add(oh.stringify(o));
+                    }
+                    return joiner.toString();
+                }
+                return s;
+            };
+        }
+
+        static boolean match(String variable) {
+            return CODE.equalsIgnoreCase(variable);
+        }
+    }
+
+    static class CustomJoinerCode extends JoinerCode {
+
+        private static final Pattern PATTERN = Pattern.compile("^(?:" + CODE + " delimiter='(.*)')$");
+
+        public CustomJoinerCode(TemplateContext tc, DefaultMustacheFactory df, Mustache mustache, String variable) {
+            super(tc, df, mustache, extractDelimiter(variable));
+        }
+
+        private static String extractDelimiter(String variable) {
+            Matcher matcher = PATTERN.matcher(variable);
+            if (matcher.find()) {
+                return matcher.group(1);
+            }
+            throw new MustacheException("Failed to extract delimiter for join function");
+        }
+
+        static boolean match(String variable) {
+            return PATTERN.matcher(variable).matches();
+        }
+    }
+
+    class NoEscapeEncoder implements BiConsumer<String, Writer> {
+
+        @Override
+        public void accept(String s, Writer writer) {
+            try {
+                writer.write(s);
+            } catch (IOException e) {
+                throw new MustacheException("Failed to encode value: " + s);
+            }
+        }
+    }
+
+    class JsonEscapeEncoder implements BiConsumer<String, Writer> {
+
+        @Override
+        public void accept(String s, Writer writer) {
+            try {
+                writer.write(JsonStringEncoder.getInstance().quoteAsString(s));
+            } catch (IOException e) {
+                throw new MustacheException("Failed to escape and encode value: " + s);
+            }
+        }
+    }
+}
diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomReflectionObjectHandler.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomReflectionObjectHandler.java
index 45d3d8c182d..dd3055ba8e8 100644
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomReflectionObjectHandler.java
+++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomReflectionObjectHandler.java
@@ -49,7 +49,7 @@ final class CustomReflectionObjectHandler extends ReflectionObjectHandler {
        }
    }

-    final static class ArrayMap extends AbstractMap<Object, Object> implements Iterable<Object> {
+    static final class ArrayMap extends AbstractMap<Object, Object> implements Iterable<Object> {

        private final Object array;
        private final int length;
@@ -109,7 +109,7 @@ final class CustomReflectionObjectHandler extends ReflectionObjectHandler {

    }

-    final static class CollectionMap extends AbstractMap<Object, Object> implements Iterable<Object> {
+    static final class CollectionMap extends AbstractMap<Object, Object> implements Iterable<Object> {

        private final Collection<Object> col;
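The CustomMustacheFactory added above wires three custom section names into the template visitor — {{#toJson}}, {{#join}}, and {{#join delimiter='...'}} — and falls back to standard Mustache iteration for anything else. A condensed, self-contained sketch of just that dispatch order, returning labels instead of the mustache.java Code objects the real visitor builds:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public final class FunctionDispatch {
        // Same shape as CustomJoinerCode.PATTERN above, with CODE inlined as "join".
        private static final Pattern CUSTOM_JOINER =
                Pattern.compile("^(?:join delimiter='(.*)')$");

        static String classify(String variable) {
            if ("toJson".equalsIgnoreCase(variable)) {
                return "ToJsonCode";
            } else if ("join".equalsIgnoreCase(variable)) {
                return "JoinerCode(\",\")";              // default delimiter
            }
            Matcher m = CUSTOM_JOINER.matcher(variable);
            if (m.matches()) {
                return "JoinerCode(\"" + m.group(1) + "\")"; // explicit delimiter
            }
            return "IterableCode";                        // stock mustache behaviour
        }

        public static void main(String[] args) {
            System.out.println(classify("toJson"));              // ToJsonCode
            System.out.println(classify("join"));                // JoinerCode(",")
            System.out.println(classify("join delimiter='|'"));  // JoinerCode("|")
            System.out.println(classify("ctx.items"));           // IterableCode
        }
    }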
diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java
index 42f0da3c109..d3ffa13cd54 100644
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java
+++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java
@@ -19,45 +19,44 @@ package org.elasticsearch.script.mustache;

-import org.elasticsearch.action.ActionModule;
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.search.template.MultiSearchTemplateAction;
import org.elasticsearch.action.search.template.SearchTemplateAction;
import org.elasticsearch.action.search.template.TransportMultiSearchTemplateAction;
import org.elasticsearch.action.search.template.TransportSearchTemplateAction;
-import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
+import org.elasticsearch.rest.RestHandler;
import org.elasticsearch.rest.action.search.template.RestDeleteSearchTemplateAction;
import org.elasticsearch.rest.action.search.template.RestGetSearchTemplateAction;
import org.elasticsearch.rest.action.search.template.RestMultiSearchTemplateAction;
import org.elasticsearch.rest.action.search.template.RestPutSearchTemplateAction;
import org.elasticsearch.rest.action.search.template.RestRenderSearchTemplateAction;
import org.elasticsearch.rest.action.search.template.RestSearchTemplateAction;
-import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
-import org.elasticsearch.script.ScriptModule;

-public class MustachePlugin extends Plugin implements ScriptPlugin {
+import java.util.Arrays;
+import java.util.List;
+
+public class MustachePlugin extends Plugin implements ScriptPlugin, ActionPlugin {

    @Override
    public ScriptEngineService getScriptEngineService(Settings settings) {
        return new MustacheScriptEngineService(settings);
    }

-    public void onModule(ActionModule module) {
-        module.registerAction(SearchTemplateAction.INSTANCE, TransportSearchTemplateAction.class);
-        module.registerAction(MultiSearchTemplateAction.INSTANCE, TransportMultiSearchTemplateAction.class);
+    @Override
+    public List<ActionHandler<? extends ActionRequest<?>, ? extends ActionResponse>> getActions() {
+        return Arrays.asList(new ActionHandler<>(SearchTemplateAction.INSTANCE, TransportSearchTemplateAction.class),
+                new ActionHandler<>(MultiSearchTemplateAction.INSTANCE, TransportMultiSearchTemplateAction.class));
    }

-    public void onModule(NetworkModule module) {
-        if (module.isTransportClient() == false) {
-            module.registerRestHandler(RestSearchTemplateAction.class);
-            module.registerRestHandler(RestMultiSearchTemplateAction.class);
-            module.registerRestHandler(RestGetSearchTemplateAction.class);
-            module.registerRestHandler(RestPutSearchTemplateAction.class);
-            module.registerRestHandler(RestDeleteSearchTemplateAction.class);
-            module.registerRestHandler(RestRenderSearchTemplateAction.class);
-        }
+    @Override
+    public List<Class<? extends RestHandler>> getRestHandlers() {
+        return Arrays.asList(RestSearchTemplateAction.class, RestMultiSearchTemplateAction.class, RestGetSearchTemplateAction.class,
+                RestPutSearchTemplateAction.class, RestDeleteSearchTemplateAction.class, RestRenderSearchTemplateAction.class);
    }
}
diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java
index 2a48567333b..66ecf23fa02 100644
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java
+++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java
@@ -18,8 +18,8 @@
 */
package org.elasticsearch.script.mustache;

-import com.github.mustachejava.DefaultMustacheFactory;
import com.github.mustachejava.Mustache;
+import com.github.mustachejava.MustacheFactory;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractComponent;
@@ -29,8 +29,8 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
-import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.GeneralScriptException;
+import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.lookup.SearchLookup;

@@ -89,21 +89,13 @@ public final class MustacheScriptEngineService extends AbstractComponent impleme
     * */
    @Override
    public Object compile(String templateName, String templateSource, Map<String, String> params) {
-        String contentType = params.getOrDefault(CONTENT_TYPE_PARAM, JSON_CONTENT_TYPE);
-        final DefaultMustacheFactory mustacheFactory;
-        switch (contentType){
-            case PLAIN_TEXT_CONTENT_TYPE:
-                mustacheFactory = new NoneEscapingMustacheFactory();
-                break;
-            case JSON_CONTENT_TYPE:
-            default:
-                // assume that the default is json encoding:
-                mustacheFactory = new JsonEscapingMustacheFactory();
-                break;
-        }
-        mustacheFactory.setObjectHandler(new CustomReflectionObjectHandler());
+        final MustacheFactory factory = new CustomMustacheFactory(isJsonEscapingEnabled(params));
        Reader reader = new FastStringReader(templateSource);
-        return mustacheFactory.compile(reader, "query-template");
+        return factory.compile(reader, "query-template");
+    }
+
+    private boolean isJsonEscapingEnabled(Map<String, String> params) {
+        return JSON_CONTENT_TYPE.equals(params.getOrDefault(CONTENT_TYPE_PARAM, JSON_CONTENT_TYPE));
    }

@@ -168,12 +160,9 @@ public final class MustacheScriptEngineService extends AbstractComponent impleme
            if (sm != null) {
                sm.checkPermission(SPECIAL_PERMISSION);
            }
-            AccessController.doPrivileged(new PrivilegedAction<Void>() {
-                @Override
-                public Void run() {
-                    ((Mustache) template.compiled()).execute(writer, vars);
-                    return null;
-                }
+            AccessController.doPrivileged((PrivilegedAction<Void>) () -> {
+                ((Mustache) template.compiled()).execute(writer, vars);
+                return null;
            });
        } catch (Exception e) {
            logger.error("Error running {}", e, template);
diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/NoneEscapingMustacheFactory.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/NoneEscapingMustacheFactory.java
deleted file mode 100644
index 3539402df98..00000000000
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/NoneEscapingMustacheFactory.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.script.mustache;
-
-import com.github.mustachejava.DefaultMustacheFactory;
-import com.github.mustachejava.MustacheException;
-
-import java.io.IOException;
-import java.io.Writer;
-
-/**
- * A MustacheFactory that does no string escaping.
- */
-final class NoneEscapingMustacheFactory extends DefaultMustacheFactory {
-
-    @Override
-    public void encode(String value, Writer writer) {
-        try {
-            writer.write(value);
-        } catch (IOException e) {
-            throw new MustacheException("Failed to encode value: " + value);
-        }
-    }
-}
diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/MultiSearchTemplateIT.java b/modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/MultiSearchTemplateIT.java
index 9c28225c081..03d04e518dc 100644
--- a/modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/MultiSearchTemplateIT.java
+++ b/modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/MultiSearchTemplateIT.java
@@ -146,14 +146,14 @@ public class MultiSearchTemplateIT extends ESIntegTestCase {
        SearchTemplateResponse searchTemplateResponse1 = response1.getResponse();
        assertThat(searchTemplateResponse1.hasResponse(), is(true));
        assertHitCount(searchTemplateResponse1.getResponse(), (numDocs / 2) + (numDocs % 2));
-        assertThat(searchTemplateResponse1.getSource().toUtf8(),
+        assertThat(searchTemplateResponse1.getSource().utf8ToString(),
                equalTo("{\"query\":{\"match\":{\"odd\":\"true\"}}}"));

        MultiSearchTemplateResponse.Item response2 = response.getResponses()[1];
        assertThat(response2.isFailure(), is(false));
        SearchTemplateResponse searchTemplateResponse2 = response2.getResponse();
        assertThat(searchTemplateResponse2.hasResponse(), is(false));
-        assertThat(searchTemplateResponse2.getSource().toUtf8(),
+        assertThat(searchTemplateResponse2.getSource().utf8ToString(),
                equalTo("{\"query\":{\"match_phrase_prefix\":{\"message\":\"quick brown f\"}}}"));

        MultiSearchTemplateResponse.Item response3 = response.getResponses()[2];
@@ -161,7 +161,7 @@ public class MultiSearchTemplateIT extends ESIntegTestCase {
        SearchTemplateResponse searchTemplateResponse3 = response3.getResponse();
        assertThat(searchTemplateResponse3.hasResponse(), is(true));
        assertHitCount(searchTemplateResponse3.getResponse(), (numDocs / 2));
-        assertThat(searchTemplateResponse3.getSource().toUtf8(),
+        assertThat(searchTemplateResponse3.getSource().utf8ToString(),
                equalTo("{\"query\":{\"term\":{\"odd\":\"false\"}}}"));

        MultiSearchTemplateResponse.Item response4 = response.getResponses()[3];
@@ -173,7 +173,7 @@ public class MultiSearchTemplateIT extends ESIntegTestCase {
        assertThat(response5.isFailure(), is(false));
        SearchTemplateResponse searchTemplateResponse5 = response5.getResponse();
        assertThat(searchTemplateResponse5.hasResponse(), is(false));
-        assertThat(searchTemplateResponse5.getSource().toUtf8(),
+        assertThat(searchTemplateResponse5.getSource().utf8ToString(),
                equalTo("{\"query\":{\"terms\":{\"group\":[1,2,3,]}}}"));
    }
}
diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java
index 0cca2c41938..fb8979562ff 100644
--- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java
+++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java
@@ -26,7 +26,6 @@ import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.inject.multibindings.Multibinder;
@@ -40,7 +39,6 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
@@ -120,7 +118,7 @@ public class TemplateQueryParserTests extends ESTestCase {
                b.bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class);
            },
            settingsModule,
-            new SearchModule(settings, new NamedWriteableRegistry()) {
+            new SearchModule(settings, new NamedWriteableRegistry(), false) {
                @Override
                protected void configureSearch() {
                    // skip so we don't need transport
@@ -129,23 +127,18 @@ public class TemplateQueryParserTests extends ESTestCase {
            new IndexSettingsModule(index, settings)
        ).createInjector();

-        AnalysisService analysisService = new AnalysisRegistry(null, environment).build(idxSettings);
+        AnalysisService analysisService = createAnalysisService(idxSettings, settings);
        SimilarityService similarityService = new SimilarityService(idxSettings, Collections.emptyMap());
-        MapperRegistry mapperRegistry = new IndicesModule(new NamedWriteableRegistry()).getMapperRegistry();
+        MapperRegistry mapperRegistry = new IndicesModule(new NamedWriteableRegistry(), Collections.emptyList()).getMapperRegistry();
        MapperService mapperService = new MapperService(idxSettings, analysisService, similarityService, mapperRegistry, () -> contextFactory.get());
        IndicesFieldDataCache cache = new IndicesFieldDataCache(settings, new IndexFieldDataCache.Listener() {});
-        IndexFieldDataService indexFieldDataService =new IndexFieldDataService(idxSettings, cache, injector.getInstance(CircuitBreakerService.class), mapperService);
+        IndexFieldDataService indexFieldDataService = new IndexFieldDataService(idxSettings, cache, injector.getInstance(CircuitBreakerService.class), mapperService);
        BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, new BitsetFilterCache.Listener() {
            @Override
-            public void onCache(ShardId shardId, Accountable accountable) {
-
-            }
-
+            public void onCache(ShardId shardId, Accountable accountable) {}
            @Override
-            public void onRemoval(ShardId shardId, Accountable accountable) {
-
-            }
+            public void onRemoval(ShardId shardId, Accountable accountable) {}
        });
        IndicesQueriesRegistry indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class);
        contextFactory = () -> new QueryShardContext(idxSettings, bitsetFilterCache, indexFieldDataService, mapperService,
diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java
index 254020066b5..bdba0346a85 100644
--- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java
+++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java
@@ -18,6 +18,7 @@
 */
package org.elasticsearch.script.mustache;

+import com.github.mustachejava.MustacheFactory;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.CompiledScript;
@@ -27,7 +28,6 @@ import org.junit.Before;
import java.io.IOException;
import java.io.StringWriter;
-import java.nio.charset.Charset;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
@@ -39,12 +39,12 @@ import static org.hamcrest.Matchers.equalTo;
 */
public class MustacheScriptEngineTests extends ESTestCase {
    private MustacheScriptEngineService qe;
-    private JsonEscapingMustacheFactory escaper;
+    private MustacheFactory factory;

    @Before
    public void setup() {
        qe = new MustacheScriptEngineService(Settings.Builder.EMPTY_SETTINGS);
-        escaper = new JsonEscapingMustacheFactory();
+        factory = new CustomMustacheFactory(true);
    }

    public void testSimpleParameterReplace() {
@@ -57,7 +57,7 @@ public class MustacheScriptEngineTests extends ESTestCase {
            BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache",
                    qe.compile(null, template, compileParams)), vars).run();
            assertEquals("GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}},"
                    + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}}}, \"negative_boost\": 0.3 } }}",
-                    new String(o.toBytes(), Charset.forName("UTF-8")));
+                    o.utf8ToString());
        }
        {
            String template = "GET _search {\"query\": " + "{\"boosting\": {" + "\"positive\": {\"match\": {\"body\": \"gift\"}},"
@@ -68,19 +68,19 @@ public class MustacheScriptEngineTests extends ESTestCase {
            BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache",
                    qe.compile(null, template, compileParams)), vars).run();
            assertEquals("GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}},"
                    + "\"negative\": {\"term\": {\"body\": {\"value\": \"\\\"quick brown\\\"\"}}}, \"negative_boost\": 0.3 } }}",
-                    new String(o.toBytes(), Charset.forName("UTF-8")));
+                    o.utf8ToString());
        }
    }

    public void testEscapeJson() throws IOException {
        {
            StringWriter writer = new StringWriter();
-            escaper.encode("hello \n world", writer);
+            factory.encode("hello \n world", writer);
            assertThat(writer.toString(), equalTo("hello \\n world"));
        }
        {
            StringWriter writer = new StringWriter();
-            escaper.encode("\n", writer);
+            factory.encode("\n", writer);
            assertThat(writer.toString(), equalTo("\\n"));
        }

@@ -135,7 +135,7 @@ public class MustacheScriptEngineTests extends ESTestCase {
            expect.append(escapedChars[charIndex]);
        }
        StringWriter target = new StringWriter();
-        escaper.encode(writer.toString(), target);
+        factory.encode(writer.toString(), target);
        assertThat(expect.toString(), equalTo(target.toString()));
    }
}
diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java
index f850f117cb6..91098eb1c88 100644
--- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java
+++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java
@@ -19,13 +19,16 @@ package org.elasticsearch.script.mustache;

import com.github.mustachejava.Mustache;
+import com.github.mustachejava.MustacheException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptEngineService;
-import org.elasticsearch.script.ScriptService;
org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matcher; import java.util.Arrays; import java.util.Collections; @@ -38,6 +41,8 @@ import java.util.Set; import static java.util.Collections.singleton; import static java.util.Collections.singletonMap; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.script.ScriptService.ScriptType.INLINE; import static org.elasticsearch.script.mustache.MustacheScriptEngineService.CONTENT_TYPE_PARAM; import static org.elasticsearch.script.mustache.MustacheScriptEngineService.JSON_CONTENT_TYPE; import static org.elasticsearch.script.mustache.MustacheScriptEngineService.PLAIN_TEXT_CONTENT_TYPE; @@ -45,6 +50,8 @@ import static org.hamcrest.Matchers.both; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.isEmptyOrNullString; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; public class MustacheTests extends ESTestCase { @@ -59,19 +66,19 @@ public class MustacheTests extends ESTestCase { Map params = Collections.singletonMap("boost_val", "0.2"); Mustache mustache = (Mustache) engine.compile(null, template, Collections.emptyMap()); - CompiledScript compiledScript = new CompiledScript(ScriptService.ScriptType.INLINE, "my-name", "mustache", mustache); + CompiledScript compiledScript = new CompiledScript(INLINE, "my-name", "mustache", mustache); ExecutableScript result = engine.executable(compiledScript, params); assertEquals( "Mustache templating broken", "GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}}," + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}}}, \"negative_boost\": 0.2 } }}", - ((BytesReference) result.run()).toUtf8() + ((BytesReference) result.run()).utf8ToString() ); } public void testArrayAccess() throws Exception { String template = "{{data.0}} {{data.1}}"; - CompiledScript mustache = new CompiledScript(ScriptService.ScriptType.INLINE, "inline", "mustache", engine.compile(null, template, Collections.emptyMap())); + CompiledScript mustache = new CompiledScript(INLINE, "inline", "mustache", engine.compile(null, template, Collections.emptyMap())); Map vars = new HashMap<>(); Object data = randomFrom( new String[] { "foo", "bar" }, @@ -81,7 +88,7 @@ public class MustacheTests extends ESTestCase { assertThat(output, notNullValue()); assertThat(output, instanceOf(BytesReference.class)); BytesReference bytes = (BytesReference) output; - assertThat(bytes.toUtf8(), equalTo("foo bar")); + assertThat(bytes.utf8ToString(), equalTo("foo bar")); // Sets can come out in any order Set setData = new HashSet<>(); @@ -92,12 +99,12 @@ public class MustacheTests extends ESTestCase { assertThat(output, notNullValue()); assertThat(output, instanceOf(BytesReference.class)); bytes = (BytesReference) output; - assertThat(bytes.toUtf8(), both(containsString("foo")).and(containsString("bar"))); + assertThat(bytes.utf8ToString(), both(containsString("foo")).and(containsString("bar"))); } public void testArrayInArrayAccess() throws Exception { String template = "{{data.0.0}} {{data.0.1}}"; - CompiledScript mustache = new CompiledScript(ScriptService.ScriptType.INLINE, "inline", "mustache", engine.compile(null, template, Collections.emptyMap())); + CompiledScript mustache = new CompiledScript(INLINE, "inline", "mustache", engine.compile(null, 
template, Collections.emptyMap())); Map vars = new HashMap<>(); Object data = randomFrom( new String[][] { new String[] { "foo", "bar" }}, @@ -109,12 +116,12 @@ public class MustacheTests extends ESTestCase { assertThat(output, notNullValue()); assertThat(output, instanceOf(BytesReference.class)); BytesReference bytes = (BytesReference) output; - assertThat(bytes.toUtf8(), equalTo("foo bar")); + assertThat(bytes.utf8ToString(), equalTo("foo bar")); } public void testMapInArrayAccess() throws Exception { String template = "{{data.0.key}} {{data.1.key}}"; - CompiledScript mustache = new CompiledScript(ScriptService.ScriptType.INLINE, "inline", "mustache", engine.compile(null, template, Collections.emptyMap())); + CompiledScript mustache = new CompiledScript(INLINE, "inline", "mustache", engine.compile(null, template, Collections.emptyMap())); Map vars = new HashMap<>(); Object data = randomFrom( new Object[] { singletonMap("key", "foo"), singletonMap("key", "bar") }, @@ -124,7 +131,7 @@ public class MustacheTests extends ESTestCase { assertThat(output, notNullValue()); assertThat(output, instanceOf(BytesReference.class)); BytesReference bytes = (BytesReference) output; - assertThat(bytes.toUtf8(), equalTo("foo bar")); + assertThat(bytes.utf8ToString(), equalTo("foo bar")); // HashSet iteration order isn't fixed Set setData = new HashSet<>(); @@ -135,25 +142,25 @@ public class MustacheTests extends ESTestCase { assertThat(output, notNullValue()); assertThat(output, instanceOf(BytesReference.class)); bytes = (BytesReference) output; - assertThat(bytes.toUtf8(), both(containsString("foo")).and(containsString("bar"))); + assertThat(bytes.utf8ToString(), both(containsString("foo")).and(containsString("bar"))); } public void testEscaping() { // json string escaping enabled: Map params = randomBoolean() ? 
Collections.emptyMap() : Collections.singletonMap(CONTENT_TYPE_PARAM, JSON_CONTENT_TYPE); Mustache mustache = (Mustache) engine.compile(null, "{ \"field1\": \"{{value}}\"}", Collections.emptyMap()); - CompiledScript compiledScript = new CompiledScript(ScriptService.ScriptType.INLINE, "name", "mustache", mustache); + CompiledScript compiledScript = new CompiledScript(INLINE, "name", "mustache", mustache); ExecutableScript executableScript = engine.executable(compiledScript, Collections.singletonMap("value", "a \"value\"")); BytesReference rawResult = (BytesReference) executableScript.run(); - String result = rawResult.toUtf8(); + String result = rawResult.utf8ToString(); assertThat(result, equalTo("{ \"field1\": \"a \\\"value\\\"\"}")); // json string escaping disabled: mustache = (Mustache) engine.compile(null, "{ \"field1\": \"{{value}}\"}", Collections.singletonMap(CONTENT_TYPE_PARAM, PLAIN_TEXT_CONTENT_TYPE)); - compiledScript = new CompiledScript(ScriptService.ScriptType.INLINE, "name", "mustache", mustache); + compiledScript = new CompiledScript(INLINE, "name", "mustache", mustache); executableScript = engine.executable(compiledScript, Collections.singletonMap("value", "a \"value\"")); rawResult = (BytesReference) executableScript.run(); - result = rawResult.toUtf8(); + result = rawResult.utf8ToString(); assertThat(result, equalTo("{ \"field1\": \"a \"value\"\"}")); } @@ -162,7 +169,7 @@ public class MustacheTests extends ESTestCase { List randomList = Arrays.asList(generateRandomStringArray(10, 20, false)); String template = "{{data.array.size}} {{data.list.size}}"; - CompiledScript mustache = new CompiledScript(ScriptService.ScriptType.INLINE, "inline", "mustache", engine.compile(null, template, Collections.emptyMap())); + CompiledScript mustache = new CompiledScript(INLINE, "inline", "mustache", engine.compile(null, template, Collections.emptyMap())); Map data = new HashMap<>(); data.put("array", randomArrayValues); data.put("list", randomList); @@ -175,6 +182,207 @@ public class MustacheTests extends ESTestCase { BytesReference bytes = (BytesReference) output; String expectedString = String.format(Locale.ROOT, "%s %s", randomArrayValues.length, randomList.size()); - assertThat(bytes.toUtf8(), equalTo(expectedString)); + assertThat(bytes.utf8ToString(), equalTo(expectedString)); + } + + public void testPrimitiveToJSON() throws Exception { + String template = "{{#toJson}}ctx{{/toJson}}"; + assertScript(template, Collections.singletonMap("ctx", "value"), equalTo("value")); + assertScript(template, Collections.singletonMap("ctx", ""), equalTo("")); + assertScript(template, Collections.singletonMap("ctx", true), equalTo("true")); + assertScript(template, Collections.singletonMap("ctx", 42), equalTo("42")); + assertScript(template, Collections.singletonMap("ctx", 42L), equalTo("42")); + assertScript(template, Collections.singletonMap("ctx", 42.5f), equalTo("42.5")); + assertScript(template, Collections.singletonMap("ctx", null), equalTo("")); + + template = "{{#toJson}}.{{/toJson}}"; + assertScript(template, Collections.singletonMap("ctx", "value"), equalTo("{\"ctx\":\"value\"}")); + assertScript(template, Collections.singletonMap("ctx", ""), equalTo("{\"ctx\":\"\"}")); + assertScript(template, Collections.singletonMap("ctx", true), equalTo("{\"ctx\":true}")); + assertScript(template, Collections.singletonMap("ctx", 42), equalTo("{\"ctx\":42}")); + assertScript(template, Collections.singletonMap("ctx", 42L), equalTo("{\"ctx\":42}")); + assertScript(template, 
Collections.singletonMap("ctx", 42.5f), equalTo("{\"ctx\":42.5}")); + assertScript(template, Collections.singletonMap("ctx", null), equalTo("{\"ctx\":null}")); + } + + public void testSimpleMapToJSON() throws Exception { + Map human0 = new HashMap<>(); + human0.put("age", 42); + human0.put("name", "John Smith"); + human0.put("height", 1.84); + + Map ctx = Collections.singletonMap("ctx", human0); + + assertScript("{{#toJson}}.{{/toJson}}", ctx, equalTo("{\"ctx\":{\"name\":\"John Smith\",\"age\":42,\"height\":1.84}}")); + assertScript("{{#toJson}}ctx{{/toJson}}", ctx, equalTo("{\"name\":\"John Smith\",\"age\":42,\"height\":1.84}")); + assertScript("{{#toJson}}ctx.name{{/toJson}}", ctx, equalTo("John Smith")); + } + + public void testMultipleMapsToJSON() throws Exception { + Map human0 = new HashMap<>(); + human0.put("age", 42); + human0.put("name", "John Smith"); + human0.put("height", 1.84); + + Map human1 = new HashMap<>(); + human1.put("age", 27); + human1.put("name", "Dave Smith"); + human1.put("height", 1.71); + + Map humans = new HashMap<>(); + humans.put("first", human0); + humans.put("second", human1); + + Map ctx = Collections.singletonMap("ctx", humans); + + assertScript("{{#toJson}}.{{/toJson}}", ctx, + equalTo("{\"ctx\":{\"first\":{\"name\":\"John Smith\",\"age\":42,\"height\":1.84},\"second\":{\"name\":\"Dave Smith\",\"age\":27,\"height\":1.71}}}")); + + assertScript("{{#toJson}}ctx{{/toJson}}", ctx, + equalTo("{\"first\":{\"name\":\"John Smith\",\"age\":42,\"height\":1.84},\"second\":{\"name\":\"Dave Smith\",\"age\":27,\"height\":1.71}}")); + + assertScript("{{#toJson}}ctx.first{{/toJson}}", ctx, + equalTo("{\"name\":\"John Smith\",\"age\":42,\"height\":1.84}")); + + assertScript("{{#toJson}}ctx.second{{/toJson}}", ctx, + equalTo("{\"name\":\"Dave Smith\",\"age\":27,\"height\":1.71}")); + } + + public void testSimpleArrayToJSON() throws Exception { + String[] array = new String[]{"one", "two", "three"}; + Map ctx = Collections.singletonMap("array", array); + + assertScript("{{#toJson}}.{{/toJson}}", ctx, equalTo("{\"array\":[\"one\",\"two\",\"three\"]}")); + assertScript("{{#toJson}}array{{/toJson}}", ctx, equalTo("[\"one\",\"two\",\"three\"]")); + assertScript("{{#toJson}}array.0{{/toJson}}", ctx, equalTo("one")); + assertScript("{{#toJson}}array.1{{/toJson}}", ctx, equalTo("two")); + assertScript("{{#toJson}}array.2{{/toJson}}", ctx, equalTo("three")); + assertScript("{{#toJson}}array.size{{/toJson}}", ctx, equalTo("3")); + } + + public void testSimpleListToJSON() throws Exception { + List list = Arrays.asList("one", "two", "three"); + Map ctx = Collections.singletonMap("ctx", list); + + assertScript("{{#toJson}}.{{/toJson}}", ctx, equalTo("{\"ctx\":[\"one\",\"two\",\"three\"]}")); + assertScript("{{#toJson}}ctx{{/toJson}}", ctx, equalTo("[\"one\",\"two\",\"three\"]")); + assertScript("{{#toJson}}ctx.0{{/toJson}}", ctx, equalTo("one")); + assertScript("{{#toJson}}ctx.1{{/toJson}}", ctx, equalTo("two")); + assertScript("{{#toJson}}ctx.2{{/toJson}}", ctx, equalTo("three")); + assertScript("{{#toJson}}ctx.size{{/toJson}}", ctx, equalTo("3")); + } + + public void testsUnsupportedTagsToJson() { + MustacheException e = expectThrows(MustacheException.class, () -> compile("{{#toJson}}{{foo}}{{bar}}{{/toJson}}")); + assertThat(e.getMessage(), containsString("Mustache function [toJson] must contain one and only one identifier")); + + e = expectThrows(MustacheException.class, () -> compile("{{#toJson}}{{/toJson}}")); + assertThat(e.getMessage(), containsString("Mustache function 
[toJson] must contain one and only one identifier")); + } + + public void testEmbeddedToJSON() throws Exception { + XContentBuilder builder = jsonBuilder().startObject() + .startArray("bulks") + .startObject() + .field("index", "index-1") + .field("type", "type-1") + .field("id", 1) + .endObject() + .startObject() + .field("index", "index-2") + .field("type", "type-2") + .field("id", 2) + .endObject() + .endArray() + .endObject(); + + Map<String, Object> ctx = Collections.singletonMap("ctx", XContentHelper.convertToMap(builder.bytes(), false).v2()); + + assertScript("{{#ctx.bulks}}{{#toJson}}.{{/toJson}}{{/ctx.bulks}}", ctx, + equalTo("{\"index\":\"index-1\",\"id\":1,\"type\":\"type-1\"}{\"index\":\"index-2\",\"id\":2,\"type\":\"type-2\"}")); + + assertScript("{{#ctx.bulks}}<{{#toJson}}id{{/toJson}}>{{/ctx.bulks}}", ctx, + equalTo("<1><2>")); + } + + public void testSimpleArrayJoin() throws Exception { + String template = "{{#join}}array{{/join}}"; + assertScript(template, Collections.singletonMap("array", new String[]{"one", "two", "three"}), equalTo("one,two,three")); + assertScript(template, Collections.singletonMap("array", new int[]{1, 2, 3}), equalTo("1,2,3")); + assertScript(template, Collections.singletonMap("array", new long[]{1L, 2L, 3L}), equalTo("1,2,3")); + assertScript(template, Collections.singletonMap("array", new double[]{1.5, 2.5, 3.5}), equalTo("1.5,2.5,3.5")); + assertScript(template, Collections.singletonMap("array", new boolean[]{true, false, true}), equalTo("true,false,true")); + } + + public void testEmbeddedArrayJoin() throws Exception { + XContentBuilder builder = jsonBuilder().startObject() + .startArray("people") + .startObject() + .field("name", "John Smith") + .startArray("emails") + .value("john@smith.com") + .value("john.smith@email.com") + .value("jsmith@email.com") + .endArray() + .endObject() + .startObject() + .field("name", "John Doe") + .startArray("emails") + .value("john@doe.com") + .value("john.doe@email.com") + .value("jdoe@email.com") + .endArray() + .endObject() + .endArray() + .endObject(); + + Map<String, Object> ctx = Collections.singletonMap("ctx", XContentHelper.convertToMap(builder.bytes(), false).v2()); + + assertScript("{{#join}}ctx.people.0.emails{{/join}}", ctx, + equalTo("john@smith.com,john.smith@email.com,jsmith@email.com")); + + assertScript("{{#join}}ctx.people.1.emails{{/join}}", ctx, + equalTo("john@doe.com,john.doe@email.com,jdoe@email.com")); + + assertScript("{{#ctx.people}}to: {{#join}}emails{{/join}};{{/ctx.people}}", ctx, + equalTo("to: john@smith.com,john.smith@email.com,jsmith@email.com;to: john@doe.com,john.doe@email.com,jdoe@email.com;")); + } + + public void testJoinWithToJson() { + Map<String, Object> params = Collections.singletonMap("terms", + Arrays.asList(singletonMap("term", "foo"), singletonMap("term", "bar"))); + + assertScript("{{#join}}{{#toJson}}terms{{/toJson}}{{/join}}", params, + equalTo("[{\"term\":\"foo\"},{\"term\":\"bar\"}]")); + } + + public void testsUnsupportedTagsJoin() { + MustacheException e = expectThrows(MustacheException.class, () -> compile("{{#join}}{{/join}}")); + assertThat(e.getMessage(), containsString("Mustache function [join] must contain one and only one identifier")); + + e = expectThrows(MustacheException.class, () -> compile("{{#join delimiter='a'}}{{/join delimiter='b'}}")); + assertThat(e.getMessage(), containsString("Mismatched start/end tags")); + } + + public void testJoinWithCustomDelimiter() { + Map<String, Object>
params = Collections.singletonMap("params", Arrays.asList(1, 2, 3, 4)); + + assertScript("{{#join delimiter=''}}params{{/join delimiter=''}}", params, equalTo("1234")); + assertScript("{{#join delimiter=','}}params{{/join delimiter=','}}", params, equalTo("1,2,3,4")); + assertScript("{{#join delimiter='/'}}params{{/join delimiter='/'}}", params, equalTo("1/2/3/4")); + assertScript("{{#join delimiter=' and '}}params{{/join delimiter=' and '}}", params, equalTo("1 and 2 and 3 and 4")); + } + + private void assertScript(String script, Map vars, Matcher matcher) { + Object result = engine.executable(new CompiledScript(INLINE, "inline", "mustache", compile(script)), vars).run(); + assertThat(result, notNullValue()); + assertThat(result, instanceOf(BytesReference.class)); + assertThat(((BytesReference) result).utf8ToString(), matcher); + } + + private Object compile(String script) { + assertThat("cannot compile null or empty script", script, not(isEmptyOrNullString())); + return engine.compile(null, script, Collections.emptyMap()); } } diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/template.msearch/10_basic.yaml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/msearch_template/10_basic.yaml similarity index 97% rename from modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/template.msearch/10_basic.yaml rename to modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/msearch_template/10_basic.yaml index 2a32e75331e..ab42133a2b2 100644 --- a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/template.msearch/10_basic.yaml +++ b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/msearch_template/10_basic.yaml @@ -36,7 +36,7 @@ setup: "Basic multi-search template": - do: - template.msearch: + msearch_template: body: - index: index_* - inline: '{"query": {"match": {"foo": "{{value}}"} } }' @@ -71,7 +71,7 @@ setup: "Multi-search template with errors": - do: - template.msearch: + msearch_template: body: # Search 0 is OK - index: index_* @@ -109,7 +109,7 @@ setup: - do: catch: /(.)*action_request_validation_exception(.)*template.is.missing(.)*/ - template.msearch: + msearch_template: body: # Search 0 is OK - index: index_* diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/template.msearch/20_stored_template.yaml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/msearch_template/20_stored_template.yaml similarity index 98% rename from modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/template.msearch/20_stored_template.yaml rename to modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/msearch_template/20_stored_template.yaml index 5d04449e541..ffbe9200375 100644 --- a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/template.msearch/20_stored_template.yaml +++ b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/msearch_template/20_stored_template.yaml @@ -42,7 +42,7 @@ setup: - match: { acknowledged: true } - do: - template.msearch: + msearch_template: body: - index: index_* - id: stored_template_1 diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/template.msearch/30_file_template.yaml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/msearch_template/30_file_template.yaml similarity index 98% rename from 
modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/template.msearch/30_file_template.yaml rename to modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/msearch_template/30_file_template.yaml index 0c6ad8021e1..a5d11898baf 100644 --- a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/template.msearch/30_file_template.yaml +++ b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/msearch_template/30_file_template.yaml @@ -44,7 +44,7 @@ setup: - match: { template_output.size: 20 } - do: - template.msearch: + msearch_template: body: - index: index_* - file: template_1 diff --git a/modules/lang-painless/src/main/antlr/PainlessLexer.g4 b/modules/lang-painless/src/main/antlr/PainlessLexer.g4 index ca469eed69a..a2e7b921300 100644 --- a/modules/lang-painless/src/main/antlr/PainlessLexer.g4 +++ b/modules/lang-painless/src/main/antlr/PainlessLexer.g4 @@ -40,6 +40,7 @@ DOT: '.' -> mode(AFTER_DOT); COMMA: ','; SEMICOLON: ';'; IF: 'if'; +IN: 'in'; ELSE: 'else'; WHILE: 'while'; DO: 'do'; @@ -52,6 +53,7 @@ TRY: 'try'; CATCH: 'catch'; THROW: 'throw'; THIS: 'this'; +INSTANCEOF: 'instanceof'; BOOLNOT: '!'; BWNOT: '~'; diff --git a/modules/lang-painless/src/main/antlr/PainlessLexer.tokens b/modules/lang-painless/src/main/antlr/PainlessLexer.tokens index 3968464f015..dc62fe36b28 100644 --- a/modules/lang-painless/src/main/antlr/PainlessLexer.tokens +++ b/modules/lang-painless/src/main/antlr/PainlessLexer.tokens @@ -10,74 +10,76 @@ DOT=9 COMMA=10 SEMICOLON=11 IF=12 -ELSE=13 -WHILE=14 -DO=15 -FOR=16 -CONTINUE=17 -BREAK=18 -RETURN=19 -NEW=20 -TRY=21 -CATCH=22 -THROW=23 -THIS=24 -BOOLNOT=25 -BWNOT=26 -MUL=27 -DIV=28 -REM=29 -ADD=30 -SUB=31 -LSH=32 -RSH=33 -USH=34 -LT=35 -LTE=36 -GT=37 -GTE=38 -EQ=39 -EQR=40 -NE=41 -NER=42 -BWAND=43 -XOR=44 -BWOR=45 -BOOLAND=46 -BOOLOR=47 -COND=48 -COLON=49 -REF=50 -ARROW=51 -FIND=52 -MATCH=53 -INCR=54 -DECR=55 -ASSIGN=56 -AADD=57 -ASUB=58 -AMUL=59 -ADIV=60 -AREM=61 -AAND=62 -AXOR=63 -AOR=64 -ALSH=65 -ARSH=66 -AUSH=67 -OCTAL=68 -HEX=69 -INTEGER=70 -DECIMAL=71 -STRING=72 -REGEX=73 -TRUE=74 -FALSE=75 -NULL=76 -TYPE=77 -ID=78 -DOTINTEGER=79 -DOTID=80 +IN=13 +ELSE=14 +WHILE=15 +DO=16 +FOR=17 +CONTINUE=18 +BREAK=19 +RETURN=20 +NEW=21 +TRY=22 +CATCH=23 +THROW=24 +THIS=25 +INSTANCEOF=26 +BOOLNOT=27 +BWNOT=28 +MUL=29 +DIV=30 +REM=31 +ADD=32 +SUB=33 +LSH=34 +RSH=35 +USH=36 +LT=37 +LTE=38 +GT=39 +GTE=40 +EQ=41 +EQR=42 +NE=43 +NER=44 +BWAND=45 +XOR=46 +BWOR=47 +BOOLAND=48 +BOOLOR=49 +COND=50 +COLON=51 +REF=52 +ARROW=53 +FIND=54 +MATCH=55 +INCR=56 +DECR=57 +ASSIGN=58 +AADD=59 +ASUB=60 +AMUL=61 +ADIV=62 +AREM=63 +AAND=64 +AXOR=65 +AOR=66 +ALSH=67 +ARSH=68 +AUSH=69 +OCTAL=70 +HEX=71 +INTEGER=72 +DECIMAL=73 +STRING=74 +REGEX=75 +TRUE=76 +FALSE=77 +NULL=78 +TYPE=79 +ID=80 +DOTINTEGER=81 +DOTID=82 '{'=3 '}'=4 '['=5 @@ -88,61 +90,63 @@ DOTID=80 ','=10 ';'=11 'if'=12 -'else'=13 -'while'=14 -'do'=15 -'for'=16 -'continue'=17 -'break'=18 -'return'=19 -'new'=20 -'try'=21 -'catch'=22 -'throw'=23 -'this'=24 -'!'=25 -'~'=26 -'*'=27 -'/'=28 -'%'=29 -'+'=30 -'-'=31 -'<<'=32 -'>>'=33 -'>>>'=34 -'<'=35 -'<='=36 -'>'=37 -'>='=38 -'=='=39 -'==='=40 -'!='=41 -'!=='=42 -'&'=43 -'^'=44 -'|'=45 -'&&'=46 -'||'=47 -'?'=48 -':'=49 -'::'=50 -'->'=51 -'=~'=52 -'==~'=53 -'++'=54 -'--'=55 -'='=56 -'+='=57 -'-='=58 -'*='=59 -'/='=60 -'%='=61 -'&='=62 -'^='=63 -'|='=64 -'<<='=65 -'>>='=66 -'>>>='=67 -'true'=74 -'false'=75 -'null'=76 +'in'=13 +'else'=14 +'while'=15 +'do'=16 +'for'=17 +'continue'=18 +'break'=19 +'return'=20 +'new'=21 
+'try'=22 +'catch'=23 +'throw'=24 +'this'=25 +'instanceof'=26 +'!'=27 +'~'=28 +'*'=29 +'/'=30 +'%'=31 +'+'=32 +'-'=33 +'<<'=34 +'>>'=35 +'>>>'=36 +'<'=37 +'<='=38 +'>'=39 +'>='=40 +'=='=41 +'==='=42 +'!='=43 +'!=='=44 +'&'=45 +'^'=46 +'|'=47 +'&&'=48 +'||'=49 +'?'=50 +':'=51 +'::'=52 +'->'=53 +'=~'=54 +'==~'=55 +'++'=56 +'--'=57 +'='=58 +'+='=59 +'-='=60 +'*='=61 +'/='=62 +'%='=63 +'&='=64 +'^='=65 +'|='=66 +'<<='=67 +'>>='=68 +'>>>='=69 +'true'=76 +'false'=77 +'null'=78 diff --git a/modules/lang-painless/src/main/antlr/PainlessParser.g4 b/modules/lang-painless/src/main/antlr/PainlessParser.g4 index 10471056acd..b102734a4f4 100644 --- a/modules/lang-painless/src/main/antlr/PainlessParser.g4 +++ b/modules/lang-painless/src/main/antlr/PainlessParser.g4 @@ -42,6 +42,7 @@ statement | DO block WHILE LP expression RP delimiter # do | FOR LP initializer? SEMICOLON expression? SEMICOLON afterthought? RP ( trailer | empty ) # for | FOR LP decltype ID COLON expression RP trailer # each + | FOR LP ID IN expression RP trailer # ineach | declaration delimiter # decl | CONTINUE delimiter # continue | BREAK delimiter # break @@ -105,6 +106,7 @@ expression returns [boolean s = true] | expression ( FIND | MATCH ) expression { $s = false; } # binary | expression ( LSH | RSH | USH ) expression { $s = false; } # binary | expression ( LT | LTE | GT | GTE ) expression { $s = false; } # comp + | expression INSTANCEOF decltype { $s = false; } # instanceof | expression ( EQ | EQR | NE | NER ) expression { $s = false; } # comp | expression BWAND expression { $s = false; } # binary | expression XOR expression { $s = false; } # binary diff --git a/modules/lang-painless/src/main/antlr/PainlessParser.tokens b/modules/lang-painless/src/main/antlr/PainlessParser.tokens index 3968464f015..dc62fe36b28 100644 --- a/modules/lang-painless/src/main/antlr/PainlessParser.tokens +++ b/modules/lang-painless/src/main/antlr/PainlessParser.tokens @@ -10,74 +10,76 @@ DOT=9 COMMA=10 SEMICOLON=11 IF=12 -ELSE=13 -WHILE=14 -DO=15 -FOR=16 -CONTINUE=17 -BREAK=18 -RETURN=19 -NEW=20 -TRY=21 -CATCH=22 -THROW=23 -THIS=24 -BOOLNOT=25 -BWNOT=26 -MUL=27 -DIV=28 -REM=29 -ADD=30 -SUB=31 -LSH=32 -RSH=33 -USH=34 -LT=35 -LTE=36 -GT=37 -GTE=38 -EQ=39 -EQR=40 -NE=41 -NER=42 -BWAND=43 -XOR=44 -BWOR=45 -BOOLAND=46 -BOOLOR=47 -COND=48 -COLON=49 -REF=50 -ARROW=51 -FIND=52 -MATCH=53 -INCR=54 -DECR=55 -ASSIGN=56 -AADD=57 -ASUB=58 -AMUL=59 -ADIV=60 -AREM=61 -AAND=62 -AXOR=63 -AOR=64 -ALSH=65 -ARSH=66 -AUSH=67 -OCTAL=68 -HEX=69 -INTEGER=70 -DECIMAL=71 -STRING=72 -REGEX=73 -TRUE=74 -FALSE=75 -NULL=76 -TYPE=77 -ID=78 -DOTINTEGER=79 -DOTID=80 +IN=13 +ELSE=14 +WHILE=15 +DO=16 +FOR=17 +CONTINUE=18 +BREAK=19 +RETURN=20 +NEW=21 +TRY=22 +CATCH=23 +THROW=24 +THIS=25 +INSTANCEOF=26 +BOOLNOT=27 +BWNOT=28 +MUL=29 +DIV=30 +REM=31 +ADD=32 +SUB=33 +LSH=34 +RSH=35 +USH=36 +LT=37 +LTE=38 +GT=39 +GTE=40 +EQ=41 +EQR=42 +NE=43 +NER=44 +BWAND=45 +XOR=46 +BWOR=47 +BOOLAND=48 +BOOLOR=49 +COND=50 +COLON=51 +REF=52 +ARROW=53 +FIND=54 +MATCH=55 +INCR=56 +DECR=57 +ASSIGN=58 +AADD=59 +ASUB=60 +AMUL=61 +ADIV=62 +AREM=63 +AAND=64 +AXOR=65 +AOR=66 +ALSH=67 +ARSH=68 +AUSH=69 +OCTAL=70 +HEX=71 +INTEGER=72 +DECIMAL=73 +STRING=74 +REGEX=75 +TRUE=76 +FALSE=77 +NULL=78 +TYPE=79 +ID=80 +DOTINTEGER=81 +DOTID=82 '{'=3 '}'=4 '['=5 @@ -88,61 +90,63 @@ DOTID=80 ','=10 ';'=11 'if'=12 -'else'=13 -'while'=14 -'do'=15 -'for'=16 -'continue'=17 -'break'=18 -'return'=19 -'new'=20 -'try'=21 -'catch'=22 -'throw'=23 -'this'=24 -'!'=25 -'~'=26 -'*'=27 -'/'=28 -'%'=29 -'+'=30 -'-'=31 -'<<'=32 -'>>'=33 -'>>>'=34 
-'<'=35 -'<='=36 -'>'=37 -'>='=38 -'=='=39 -'==='=40 -'!='=41 -'!=='=42 -'&'=43 -'^'=44 -'|'=45 -'&&'=46 -'||'=47 -'?'=48 -':'=49 -'::'=50 -'->'=51 -'=~'=52 -'==~'=53 -'++'=54 -'--'=55 -'='=56 -'+='=57 -'-='=58 -'*='=59 -'/='=60 -'%='=61 -'&='=62 -'^='=63 -'|='=64 -'<<='=65 -'>>='=66 -'>>>='=67 -'true'=74 -'false'=75 -'null'=76 +'in'=13 +'else'=14 +'while'=15 +'do'=16 +'for'=17 +'continue'=18 +'break'=19 +'return'=20 +'new'=21 +'try'=22 +'catch'=23 +'throw'=24 +'this'=25 +'instanceof'=26 +'!'=27 +'~'=28 +'*'=29 +'/'=30 +'%'=31 +'+'=32 +'-'=33 +'<<'=34 +'>>'=35 +'>>>'=36 +'<'=37 +'<='=38 +'>'=39 +'>='=40 +'=='=41 +'==='=42 +'!='=43 +'!=='=44 +'&'=45 +'^'=46 +'|'=47 +'&&'=48 +'||'=49 +'?'=50 +':'=51 +'::'=52 +'->'=53 +'=~'=54 +'==~'=55 +'++'=56 +'--'=57 +'='=58 +'+='=59 +'-='=60 +'*='=61 +'/='=62 +'%='=63 +'&='=64 +'^='=65 +'|='=66 +'<<='=67 +'>>='=68 +'>>>='=69 +'true'=76 +'false'=77 +'null'=78 diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Augmentation.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Augmentation.java index 4bca673b4dc..9302f3c899c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Augmentation.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Augmentation.java @@ -34,6 +34,7 @@ import java.util.function.ObjIntConsumer; import java.util.function.Predicate; import java.util.function.ToDoubleFunction; import java.util.regex.Matcher; +import java.util.regex.Pattern; /** Additional methods added to classes. These must be static methods with receiver as first argument */ public class Augmentation { @@ -64,6 +65,30 @@ public class Augmentation { return false; } + /** Converts this Iterable to a Collection. Returns the original Iterable if it is already a Collection. */ + public static Collection asCollection(Iterable receiver) { + if (receiver instanceof Collection) { + return (Collection)receiver; + } + List list = new ArrayList<>(); + for (T t : receiver) { + list.add(t); + } + return list; + } + + /** Converts this Iterable to a List. Returns the original Iterable if it is already a List. */ + public static List asList(Iterable receiver) { + if (receiver instanceof List) { + return (List)receiver; + } + List list = new ArrayList<>(); + for (T t : receiver) { + list.add(t); + } + return list; + } + /** Counts the number of occurrences which satisfy the given predicate from inside this Iterable. */ public static int count(Iterable receiver, Predicate predicate) { int count = 0; @@ -155,6 +180,17 @@ public class Augmentation { return sb.toString(); } + /** + * Sums the result of an Iterable + */ + public static double sum(Iterable receiver) { + double sum = 0; + for (T t : receiver) { + sum += t.doubleValue(); + } + return sum; + } + /** * Sums the result of applying a function to each item of an Iterable. */ @@ -407,4 +443,47 @@ public class Augmentation { } return map; } + + // CharSequence augmentation + /** + * Replace all matches. Similar to {@link Matcher#replaceAll(String)} but allows you to customize the replacement based on the match. 
+ */ + public static String replaceAll(CharSequence receiver, Pattern pattern, Function<Matcher, String> replacementBuilder) { + Matcher m = pattern.matcher(receiver); + if (false == m.find()) { + // CharSequence's toString is *supposed* to always return the characters in the sequence as a String + return receiver.toString(); + } + StringBuffer result = new StringBuffer(initialBufferForReplaceWith(receiver)); + do { + m.appendReplacement(result, Matcher.quoteReplacement(replacementBuilder.apply(m))); + } while (m.find()); + m.appendTail(result); + return result.toString(); + } + + /** + * Replace the first match. Similar to {@link Matcher#replaceFirst(String)} but allows you to customize the replacement based on the + * match. + */ + public static String replaceFirst(CharSequence receiver, Pattern pattern, Function<Matcher, String> replacementBuilder) { + Matcher m = pattern.matcher(receiver); + if (false == m.find()) { + // CharSequence's toString is *supposed* to always return the characters in the sequence as a String + return receiver.toString(); + } + StringBuffer result = new StringBuffer(initialBufferForReplaceWith(receiver)); + m.appendReplacement(result, Matcher.quoteReplacement(replacementBuilder.apply(m))); + m.appendTail(result); + return result.toString(); + } + + /** + * The initial size of the {@link StringBuffer} used for {@link #replaceFirst(CharSequence, Pattern, Function)} and + * {@link #replaceAll(CharSequence, Pattern, Function)} for a particular sequence. We ape + * {@link StringBuilder#StringBuilder(CharSequence)} here and add 16 extra chars to the buffer to have a little room for growth. + */ + private static int initialBufferForReplaceWith(CharSequence seq) { + return seq.length() + 16; + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java index 94d7a1305d6..f0e1bde74d0 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java @@ -60,7 +60,7 @@ public final class CompilerSettings { * in a script before an exception is thrown. This attempts to prevent infinite loops. Note if * the counter is set to 0, no loop counter will be written. */ - public final int getMaxLoopCounter() { + public int getMaxLoopCounter() { return maxLoopCounter; } @@ -68,7 +68,7 @@ public final class CompilerSettings { * Set the cumulative total number of statements that can be made in all loops.
* @see #getMaxLoopCounter */ - public final void setMaxLoopCounter(int max) { + public void setMaxLoopCounter(int max) { this.maxLoopCounter = max; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java index e57aad862aa..69a74301bc8 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java @@ -646,7 +646,7 @@ public final class Definition { } } - private final void addStruct(final String name, final Class clazz) { + private void addStruct(final String name, final Class clazz) { if (!name.matches("^[_a-zA-Z][\\.,_a-zA-Z0-9]*$")) { throw new IllegalArgumentException("Invalid struct name [" + name + "]."); } @@ -661,7 +661,7 @@ public final class Definition { simpleTypesMap.put(name, getTypeInternal(name)); } - private final void addConstructorInternal(final String struct, final String name, final Type[] args) { + private void addConstructorInternal(final String struct, final String name, final Type[] args) { final Struct owner = structsMap.get(struct); if (owner == null) { @@ -734,7 +734,7 @@ public final class Definition { * * no spaces allowed. */ - private final void addSignature(String className, String signature) { + private void addSignature(String className, String signature) { String elements[] = signature.split("\u0020"); if (elements.length != 2) { throw new IllegalArgumentException("Malformed signature: " + signature); @@ -774,8 +774,8 @@ public final class Definition { } } - private final void addMethodInternal(String struct, String name, boolean augmentation, - Type rtn, Type[] args) { + private void addMethodInternal(String struct, String name, boolean augmentation, + Type rtn, Type[] args) { final Struct owner = structsMap.get(struct); if (owner == null) { @@ -858,7 +858,7 @@ public final class Definition { } } - private final void addFieldInternal(String struct, String name, Type type) { + private void addFieldInternal(String struct, String name, Type type) { final Struct owner = structsMap.get(struct); if (owner == null) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java index ae54b6e6271..b02ea085904 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java @@ -74,7 +74,11 @@ public final class Locals { Locals locals = new Locals(programScope, returnType); for (int i = 0; i < parameters.size(); i++) { Parameter parameter = parameters.get(i); - boolean isCapture = i < captureCount; + // TODO: allow non-captures to be r/w: + // boolean isCapture = i < captureCount; + // currently, this cannot be allowed, as we swap in real types, + // but that can prevent a store of a different type... + boolean isCapture = true; locals.addVariable(parameter.location, parameter.type, parameter.name, isCapture); } // Loop counter to catch infinite loops. Internal use only. 
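The max loop counter governed by the accessors above is Painless's guard against runaway scripts: the compiler instruments loops so that a script throws once it has executed the configured cumulative number of loop statements, and a value of 0 writes no loop counter at all. A minimal sketch of the knob, using only the accessors shown in this diff and assuming CompilerSettings can be default-constructed (illustrative, not part of the change):

    CompilerSettings settings = new CompilerSettings();
    settings.setMaxLoopCounter(10000); // script fails after 10000 cumulative loop statements
    settings.setMaxLoopCounter(0);     // 0 disables loop-counter instrumentation entirely
    int limit = settings.getMaxLoopCounter();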
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/SimpleChecksAdapter.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/SimpleChecksAdapter.java new file mode 100644 index 00000000000..aa6d121945b --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/SimpleChecksAdapter.java @@ -0,0 +1,60 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless; + +import org.objectweb.asm.ClassVisitor; +import org.objectweb.asm.Label; +import org.objectweb.asm.MethodVisitor; +import org.objectweb.asm.Opcodes; +import org.objectweb.asm.util.CheckClassAdapter; +import org.objectweb.asm.util.CheckMethodAdapter; + +import java.util.HashMap; + +/** + * A CheckClassAdapter that does not use setAccessible to try to access private fields of Label! + *
* <p>
+ * This means jump insns are not checked, but we still get all the other checking. + */ +// TODO: we should really try to get this fixed in ASM! +public class SimpleChecksAdapter extends CheckClassAdapter { + + public SimpleChecksAdapter(ClassVisitor cv) { + super(WriterConstants.ASM_VERSION, cv, false); + } + + @Override + public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) { + MethodVisitor in = cv.visitMethod(access, name, desc, signature, exceptions); + CheckMethodAdapter checker = new CheckMethodAdapter(WriterConstants.ASM_VERSION, in, new HashMap<Label, Integer>()) { + @Override + public void visitJumpInsn(int opcode, Label label) { + mv.visitJumpInsn(opcode, label); + } + + @Override + public void visitTryCatchBlock(Label start, Label end, Label handler, String type) { + mv.visitTryCatchBlock(start, end, handler, type); + } + }; + checker.version = WriterConstants.CLASS_VERSION; + return checker; + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java index e2bf804c181..c546207b1ee 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java @@ -43,35 +43,37 @@ import java.util.regex.Pattern; */ public final class WriterConstants { - public final static String BASE_CLASS_NAME = Executable.class.getName(); - public final static Type BASE_CLASS_TYPE = Type.getType(Executable.class); + public static final int CLASS_VERSION = Opcodes.V1_8; + public static final int ASM_VERSION = Opcodes.ASM5; + public static final String BASE_CLASS_NAME = Executable.class.getName(); + public static final Type BASE_CLASS_TYPE = Type.getType(Executable.class); - public final static String CLASS_NAME = BASE_CLASS_NAME + "$Script"; - public final static Type CLASS_TYPE = Type.getObjectType(CLASS_NAME.replace('.', '/')); + public static final String CLASS_NAME = BASE_CLASS_NAME + "$Script"; + public static final Type CLASS_TYPE = Type.getObjectType(CLASS_NAME.replace('.', '/')); - public final static Method CONSTRUCTOR = getAsmMethod(void.class, "<init>", String.class, String.class, BitSet.class); - public final static Method CLINIT = getAsmMethod(void.class, "<clinit>"); - public final static Method EXECUTE = + public static final Method CONSTRUCTOR = getAsmMethod(void.class, "<init>", String.class, String.class, BitSet.class); + public static final Method CLINIT = getAsmMethod(void.class, "<clinit>"); + public static final Method EXECUTE = getAsmMethod(Object.class, "execute", Map.class, Scorer.class, LeafDocLookup.class, Object.class); - public final static Type PAINLESS_ERROR_TYPE = Type.getType(PainlessError.class); + public static final Type PAINLESS_ERROR_TYPE = Type.getType(PainlessError.class); - public final static Type NEEDS_SCORE_TYPE = Type.getType(NeedsScore.class); - public final static Type SCORER_TYPE = Type.getType(Scorer.class); - public final static Method SCORER_SCORE = getAsmMethod(float.class, "score"); + public static final Type NEEDS_SCORE_TYPE = Type.getType(NeedsScore.class); + public static final Type SCORER_TYPE = Type.getType(Scorer.class); + public static final Method SCORER_SCORE = getAsmMethod(float.class, "score"); - public final static Type MAP_TYPE =
Type.getType(Map.class); + public static final Method MAP_GET = getAsmMethod(Object.class, "get", Object.class); - public final static Type ITERATOR_TYPE = Type.getType(Iterator.class); - public final static Method ITERATOR_HASNEXT = getAsmMethod(boolean.class, "hasNext"); - public final static Method ITERATOR_NEXT = getAsmMethod(Object.class, "next"); + public static final Type ITERATOR_TYPE = Type.getType(Iterator.class); + public static final Method ITERATOR_HASNEXT = getAsmMethod(boolean.class, "hasNext"); + public static final Method ITERATOR_NEXT = getAsmMethod(Object.class, "next"); - public final static Type UTILITY_TYPE = Type.getType(Utility.class); - public final static Method STRING_TO_CHAR = getAsmMethod(char.class, "StringTochar", String.class); - public final static Method CHAR_TO_STRING = getAsmMethod(String.class, "charToString", char.class); + public static final Type UTILITY_TYPE = Type.getType(Utility.class); + public static final Method STRING_TO_CHAR = getAsmMethod(char.class, "StringTochar", String.class); + public static final Method CHAR_TO_STRING = getAsmMethod(String.class, "charToString", char.class); - public final static Type METHOD_HANDLE_TYPE = Type.getType(MethodHandle.class); + public static final Type METHOD_HANDLE_TYPE = Type.getType(MethodHandle.class); public static final Type AUGMENTATION_TYPE = Type.getType(Augmentation.class); @@ -81,46 +83,46 @@ public final class WriterConstants { * because it can do it statically. This is both faster and prevents the script from doing something super slow like building a regex * per time it is run. */ - public final static Method PATTERN_COMPILE = getAsmMethod(Pattern.class, "compile", String.class, int.class); - public final static Method PATTERN_MATCHER = getAsmMethod(Matcher.class, "matcher", CharSequence.class); - public final static Method MATCHER_MATCHES = getAsmMethod(boolean.class, "matches"); - public final static Method MATCHER_FIND = getAsmMethod(boolean.class, "find"); + public static final Method PATTERN_COMPILE = getAsmMethod(Pattern.class, "compile", String.class, int.class); + public static final Method PATTERN_MATCHER = getAsmMethod(Matcher.class, "matcher", CharSequence.class); + public static final Method MATCHER_MATCHES = getAsmMethod(boolean.class, "matches"); + public static final Method MATCHER_FIND = getAsmMethod(boolean.class, "find"); /** dynamic callsite bootstrap signature */ - final static MethodType DEF_BOOTSTRAP_TYPE = + static final MethodType DEF_BOOTSTRAP_TYPE = MethodType.methodType(CallSite.class, MethodHandles.Lookup.class, String.class, MethodType.class, int.class, int.class, Object[].class); - final static Handle DEF_BOOTSTRAP_HANDLE = + static final Handle DEF_BOOTSTRAP_HANDLE = new Handle(Opcodes.H_INVOKESTATIC, Type.getInternalName(DefBootstrap.class), "bootstrap", DEF_BOOTSTRAP_TYPE.toMethodDescriptorString(), false); - public final static Type DEF_UTIL_TYPE = Type.getType(Def.class); - public final static Method DEF_TO_BOOLEAN = getAsmMethod(boolean.class, "DefToboolean" , Object.class); - public final static Method DEF_TO_BYTE_IMPLICIT = getAsmMethod(byte.class , "DefTobyteImplicit" , Object.class); - public final static Method DEF_TO_SHORT_IMPLICIT = getAsmMethod(short.class , "DefToshortImplicit" , Object.class); - public final static Method DEF_TO_CHAR_IMPLICIT = getAsmMethod(char.class , "DefTocharImplicit" , Object.class); - public final static Method DEF_TO_INT_IMPLICIT = getAsmMethod(int.class , "DefTointImplicit" , Object.class); - public final static Method 
DEF_TO_LONG_IMPLICIT = getAsmMethod(long.class , "DefTolongImplicit" , Object.class); - public final static Method DEF_TO_FLOAT_IMPLICIT = getAsmMethod(float.class , "DefTofloatImplicit" , Object.class); - public final static Method DEF_TO_DOUBLE_IMPLICIT = getAsmMethod(double.class , "DefTodoubleImplicit", Object.class); - public final static Method DEF_TO_BYTE_EXPLICIT = getAsmMethod(byte.class , "DefTobyteExplicit" , Object.class); - public final static Method DEF_TO_SHORT_EXPLICIT = getAsmMethod(short.class , "DefToshortExplicit" , Object.class); - public final static Method DEF_TO_CHAR_EXPLICIT = getAsmMethod(char.class , "DefTocharExplicit" , Object.class); - public final static Method DEF_TO_INT_EXPLICIT = getAsmMethod(int.class , "DefTointExplicit" , Object.class); - public final static Method DEF_TO_LONG_EXPLICIT = getAsmMethod(long.class , "DefTolongExplicit" , Object.class); - public final static Method DEF_TO_FLOAT_EXPLICIT = getAsmMethod(float.class , "DefTofloatExplicit" , Object.class); - public final static Method DEF_TO_DOUBLE_EXPLICIT = getAsmMethod(double.class , "DefTodoubleExplicit", Object.class); + public static final Type DEF_UTIL_TYPE = Type.getType(Def.class); + public static final Method DEF_TO_BOOLEAN = getAsmMethod(boolean.class, "DefToboolean" , Object.class); + public static final Method DEF_TO_BYTE_IMPLICIT = getAsmMethod(byte.class , "DefTobyteImplicit" , Object.class); + public static final Method DEF_TO_SHORT_IMPLICIT = getAsmMethod(short.class , "DefToshortImplicit" , Object.class); + public static final Method DEF_TO_CHAR_IMPLICIT = getAsmMethod(char.class , "DefTocharImplicit" , Object.class); + public static final Method DEF_TO_INT_IMPLICIT = getAsmMethod(int.class , "DefTointImplicit" , Object.class); + public static final Method DEF_TO_LONG_IMPLICIT = getAsmMethod(long.class , "DefTolongImplicit" , Object.class); + public static final Method DEF_TO_FLOAT_IMPLICIT = getAsmMethod(float.class , "DefTofloatImplicit" , Object.class); + public static final Method DEF_TO_DOUBLE_IMPLICIT = getAsmMethod(double.class , "DefTodoubleImplicit", Object.class); + public static final Method DEF_TO_BYTE_EXPLICIT = getAsmMethod(byte.class , "DefTobyteExplicit" , Object.class); + public static final Method DEF_TO_SHORT_EXPLICIT = getAsmMethod(short.class , "DefToshortExplicit" , Object.class); + public static final Method DEF_TO_CHAR_EXPLICIT = getAsmMethod(char.class , "DefTocharExplicit" , Object.class); + public static final Method DEF_TO_INT_EXPLICIT = getAsmMethod(int.class , "DefTointExplicit" , Object.class); + public static final Method DEF_TO_LONG_EXPLICIT = getAsmMethod(long.class , "DefTolongExplicit" , Object.class); + public static final Method DEF_TO_FLOAT_EXPLICIT = getAsmMethod(float.class , "DefTofloatExplicit" , Object.class); + public static final Method DEF_TO_DOUBLE_EXPLICIT = getAsmMethod(double.class , "DefTodoubleExplicit", Object.class); /** invokedynamic bootstrap for lambda expression/method references */ - public final static MethodType LAMBDA_BOOTSTRAP_TYPE = + public static final MethodType LAMBDA_BOOTSTRAP_TYPE = MethodType.methodType(CallSite.class, MethodHandles.Lookup.class, String.class, MethodType.class, Object[].class); - public final static Handle LAMBDA_BOOTSTRAP_HANDLE = + public static final Handle LAMBDA_BOOTSTRAP_HANDLE = new Handle(Opcodes.H_INVOKESTATIC, Type.getInternalName(LambdaMetafactory.class), "altMetafactory", LAMBDA_BOOTSTRAP_TYPE.toMethodDescriptorString(), false); /** dynamic invokedynamic bootstrap for indy 
string concats (Java 9+) */ - public final static Handle INDY_STRING_CONCAT_BOOTSTRAP_HANDLE; + public static final Handle INDY_STRING_CONCAT_BOOTSTRAP_HANDLE; static { Handle bs; try { @@ -137,24 +139,24 @@ public final class WriterConstants { INDY_STRING_CONCAT_BOOTSTRAP_HANDLE = bs; } - public final static int MAX_INDY_STRING_CONCAT_ARGS = 200; + public static final int MAX_INDY_STRING_CONCAT_ARGS = 200; - public final static Type STRING_TYPE = Type.getType(String.class); - public final static Type STRINGBUILDER_TYPE = Type.getType(StringBuilder.class); + public static final Type STRING_TYPE = Type.getType(String.class); + public static final Type STRINGBUILDER_TYPE = Type.getType(StringBuilder.class); - public final static Method STRINGBUILDER_CONSTRUCTOR = getAsmMethod(void.class, "<init>"); - public final static Method STRINGBUILDER_APPEND_BOOLEAN = getAsmMethod(StringBuilder.class, "append", boolean.class); - public final static Method STRINGBUILDER_APPEND_CHAR = getAsmMethod(StringBuilder.class, "append", char.class); - public final static Method STRINGBUILDER_APPEND_INT = getAsmMethod(StringBuilder.class, "append", int.class); - public final static Method STRINGBUILDER_APPEND_LONG = getAsmMethod(StringBuilder.class, "append", long.class); - public final static Method STRINGBUILDER_APPEND_FLOAT = getAsmMethod(StringBuilder.class, "append", float.class); - public final static Method STRINGBUILDER_APPEND_DOUBLE = getAsmMethod(StringBuilder.class, "append", double.class); - public final static Method STRINGBUILDER_APPEND_STRING = getAsmMethod(StringBuilder.class, "append", String.class); - public final static Method STRINGBUILDER_APPEND_OBJECT = getAsmMethod(StringBuilder.class, "append", Object.class); - public final static Method STRINGBUILDER_TOSTRING = getAsmMethod(String.class, "toString"); + public static final Method STRINGBUILDER_CONSTRUCTOR = getAsmMethod(void.class, "<init>"); + public static final Method STRINGBUILDER_APPEND_BOOLEAN = getAsmMethod(StringBuilder.class, "append", boolean.class); + public static final Method STRINGBUILDER_APPEND_CHAR = getAsmMethod(StringBuilder.class, "append", char.class); + public static final Method STRINGBUILDER_APPEND_INT = getAsmMethod(StringBuilder.class, "append", int.class); + public static final Method STRINGBUILDER_APPEND_LONG = getAsmMethod(StringBuilder.class, "append", long.class); + public static final Method STRINGBUILDER_APPEND_FLOAT = getAsmMethod(StringBuilder.class, "append", float.class); + public static final Method STRINGBUILDER_APPEND_DOUBLE = getAsmMethod(StringBuilder.class, "append", double.class); + public static final Method STRINGBUILDER_APPEND_STRING = getAsmMethod(StringBuilder.class, "append", String.class); + public static final Method STRINGBUILDER_APPEND_OBJECT = getAsmMethod(StringBuilder.class, "append", Object.class); + public static final Method STRINGBUILDER_TOSTRING = getAsmMethod(String.class, "toString"); - public final static Type OBJECTS_TYPE = Type.getType(Objects.class); - public final static Method EQUALS = getAsmMethod(boolean.class, "equals", Object.class, Object.class); + public static final Type OBJECTS_TYPE = Type.getType(Objects.class); + public static final Method EQUALS = getAsmMethod(boolean.class, "equals", Object.class, Object.class); private static Method getAsmMethod(final Class<?> rtype, final String name, final Class<?>...
ptypes) { return new Method(name, MethodType.methodType(rtype, ptypes).toMethodDescriptorString()); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java index f512ad20d7e..eae7150b9be 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java @@ -21,15 +21,16 @@ class PainlessLexer extends Lexer { new PredictionContextCache(); public static final int WS=1, COMMENT=2, LBRACK=3, RBRACK=4, LBRACE=5, RBRACE=6, LP=7, RP=8, DOT=9, - COMMA=10, SEMICOLON=11, IF=12, ELSE=13, WHILE=14, DO=15, FOR=16, CONTINUE=17, - BREAK=18, RETURN=19, NEW=20, TRY=21, CATCH=22, THROW=23, THIS=24, BOOLNOT=25, - BWNOT=26, MUL=27, DIV=28, REM=29, ADD=30, SUB=31, LSH=32, RSH=33, USH=34, - LT=35, LTE=36, GT=37, GTE=38, EQ=39, EQR=40, NE=41, NER=42, BWAND=43, - XOR=44, BWOR=45, BOOLAND=46, BOOLOR=47, COND=48, COLON=49, REF=50, ARROW=51, - FIND=52, MATCH=53, INCR=54, DECR=55, ASSIGN=56, AADD=57, ASUB=58, AMUL=59, - ADIV=60, AREM=61, AAND=62, AXOR=63, AOR=64, ALSH=65, ARSH=66, AUSH=67, - OCTAL=68, HEX=69, INTEGER=70, DECIMAL=71, STRING=72, REGEX=73, TRUE=74, - FALSE=75, NULL=76, TYPE=77, ID=78, DOTINTEGER=79, DOTID=80; + COMMA=10, SEMICOLON=11, IF=12, IN=13, ELSE=14, WHILE=15, DO=16, FOR=17, + CONTINUE=18, BREAK=19, RETURN=20, NEW=21, TRY=22, CATCH=23, THROW=24, + THIS=25, INSTANCEOF=26, BOOLNOT=27, BWNOT=28, MUL=29, DIV=30, REM=31, + ADD=32, SUB=33, LSH=34, RSH=35, USH=36, LT=37, LTE=38, GT=39, GTE=40, + EQ=41, EQR=42, NE=43, NER=44, BWAND=45, XOR=46, BWOR=47, BOOLAND=48, BOOLOR=49, + COND=50, COLON=51, REF=52, ARROW=53, FIND=54, MATCH=55, INCR=56, DECR=57, + ASSIGN=58, AADD=59, ASUB=60, AMUL=61, ADIV=62, AREM=63, AAND=64, AXOR=65, + AOR=66, ALSH=67, ARSH=68, AUSH=69, OCTAL=70, HEX=71, INTEGER=72, DECIMAL=73, + STRING=74, REGEX=75, TRUE=76, FALSE=77, NULL=78, TYPE=79, ID=80, DOTINTEGER=81, + DOTID=82; public static final int AFTER_DOT = 1; public static String[] modeNames = { "DEFAULT_MODE", "AFTER_DOT" @@ -37,10 +38,10 @@ class PainlessLexer extends Lexer { public static final String[] ruleNames = { "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", "DOT", - "COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE", - "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "THIS", "BOOLNOT", - "BWNOT", "MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", - "LTE", "GT", "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "XOR", "BWOR", + "COMMA", "SEMICOLON", "IF", "IN", "ELSE", "WHILE", "DO", "FOR", "CONTINUE", + "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "THIS", "INSTANCEOF", + "BOOLNOT", "BWNOT", "MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", + "LT", "LTE", "GT", "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "XOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "REF", "ARROW", "FIND", "MATCH", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL", "HEX", "INTEGER", "DECIMAL", @@ -50,25 +51,25 @@ class PainlessLexer extends Lexer { private static final String[] _LITERAL_NAMES = { null, null, null, "'{'", "'}'", "'['", "']'", "'('", "')'", "'.'", "','", - "';'", "'if'", "'else'", "'while'", "'do'", "'for'", "'continue'", "'break'", - "'return'", "'new'", "'try'", "'catch'", "'throw'", "'this'", "'!'", "'~'", - "'*'", "'/'", "'%'", "'+'", "'-'", "'<<'", 
"'>>'", "'>>>'", "'<'", "'<='", - "'>'", "'>='", "'=='", "'==='", "'!='", "'!=='", "'&'", "'^'", "'|'", - "'&&'", "'||'", "'?'", "':'", "'::'", "'->'", "'=~'", "'==~'", "'++'", - "'--'", "'='", "'+='", "'-='", "'*='", "'/='", "'%='", "'&='", "'^='", - "'|='", "'<<='", "'>>='", "'>>>='", null, null, null, null, null, null, - "'true'", "'false'", "'null'" + "';'", "'if'", "'in'", "'else'", "'while'", "'do'", "'for'", "'continue'", + "'break'", "'return'", "'new'", "'try'", "'catch'", "'throw'", "'this'", + "'instanceof'", "'!'", "'~'", "'*'", "'/'", "'%'", "'+'", "'-'", "'<<'", + "'>>'", "'>>>'", "'<'", "'<='", "'>'", "'>='", "'=='", "'==='", "'!='", + "'!=='", "'&'", "'^'", "'|'", "'&&'", "'||'", "'?'", "':'", "'::'", "'->'", + "'=~'", "'==~'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", "'/='", + "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", null, null, + null, null, null, null, "'true'", "'false'", "'null'" }; private static final String[] _SYMBOLIC_NAMES = { null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", - "DOT", "COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE", - "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "THIS", "BOOLNOT", - "BWNOT", "MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT", - "LTE", "GT", "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "XOR", "BWOR", - "BOOLAND", "BOOLOR", "COND", "COLON", "REF", "ARROW", "FIND", "MATCH", - "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", "ADIV", "AREM", "AAND", - "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL", "HEX", "INTEGER", "DECIMAL", - "STRING", "REGEX", "TRUE", "FALSE", "NULL", "TYPE", "ID", "DOTINTEGER", + "DOT", "COMMA", "SEMICOLON", "IF", "IN", "ELSE", "WHILE", "DO", "FOR", + "CONTINUE", "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "THIS", + "INSTANCEOF", "BOOLNOT", "BWNOT", "MUL", "DIV", "REM", "ADD", "SUB", "LSH", + "RSH", "USH", "LT", "LTE", "GT", "GTE", "EQ", "EQR", "NE", "NER", "BWAND", + "XOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "REF", "ARROW", "FIND", + "MATCH", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", "ADIV", "AREM", + "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL", "HEX", "INTEGER", + "DECIMAL", "STRING", "REGEX", "TRUE", "FALSE", "NULL", "TYPE", "ID", "DOTINTEGER", "DOTID" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -128,11 +129,11 @@ class PainlessLexer extends Lexer { @Override public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { - case 27: + case 29: return DIV_sempred((RuleContext)_localctx, predIndex); - case 72: + case 74: return REGEX_sempred((RuleContext)_localctx, predIndex); - case 76: + case 78: return TYPE_sempred((RuleContext)_localctx, predIndex); } return true; @@ -160,7 +161,7 @@ class PainlessLexer extends Lexer { } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2R\u0239\b\1\b\1\4"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2T\u024b\b\1\b\1\4"+ "\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n"+ "\4\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+ "\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31"+ @@ -169,196 +170,202 @@ class PainlessLexer extends Lexer { "+\4,\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64"+ "\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t;\4<\t<\4=\t"+ 
"=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4"+ - "I\tI\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\3\2\6\2\u00a6\n\2"+ - "\r\2\16\2\u00a7\3\2\3\2\3\3\3\3\3\3\3\3\7\3\u00b0\n\3\f\3\16\3\u00b3\13"+ - "\3\3\3\3\3\3\3\3\3\3\3\7\3\u00ba\n\3\f\3\16\3\u00bd\13\3\3\3\3\3\5\3\u00c1"+ - "\n\3\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7\3\b\3\b\3\t\3\t\3\n\3\n\3"+ - "\n\3\n\3\13\3\13\3\f\3\f\3\r\3\r\3\r\3\16\3\16\3\16\3\16\3\16\3\17\3\17"+ - "\3\17\3\17\3\17\3\17\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\22\3\22\3\22"+ - "\3\22\3\22\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\23\3\24\3\24"+ - "\3\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\26\3\26\3\26\3\26\3\27"+ - "\3\27\3\27\3\27\3\27\3\27\3\30\3\30\3\30\3\30\3\30\3\30\3\31\3\31\3\31"+ - "\3\31\3\31\3\32\3\32\3\33\3\33\3\34\3\34\3\35\3\35\3\35\3\36\3\36\3\37"+ - "\3\37\3 \3 \3!\3!\3!\3\"\3\"\3\"\3#\3#\3#\3#\3$\3$\3%\3%\3%\3&\3&\3\'"+ - "\3\'\3\'\3(\3(\3(\3)\3)\3)\3)\3*\3*\3*\3+\3+\3+\3+\3,\3,\3-\3-\3.\3.\3"+ - "/\3/\3/\3\60\3\60\3\60\3\61\3\61\3\62\3\62\3\63\3\63\3\63\3\64\3\64\3"+ - "\64\3\65\3\65\3\65\3\66\3\66\3\66\3\66\3\67\3\67\3\67\38\38\38\39\39\3"+ - ":\3:\3:\3;\3;\3;\3<\3<\3<\3=\3=\3=\3>\3>\3>\3?\3?\3?\3@\3@\3@\3A\3A\3"+ - "A\3B\3B\3B\3B\3C\3C\3C\3C\3D\3D\3D\3D\3D\3E\3E\6E\u019a\nE\rE\16E\u019b"+ - "\3E\5E\u019f\nE\3F\3F\3F\6F\u01a4\nF\rF\16F\u01a5\3F\5F\u01a9\nF\3G\3"+ - "G\3G\7G\u01ae\nG\fG\16G\u01b1\13G\5G\u01b3\nG\3G\5G\u01b6\nG\3H\3H\3H"+ - "\7H\u01bb\nH\fH\16H\u01be\13H\5H\u01c0\nH\3H\3H\6H\u01c4\nH\rH\16H\u01c5"+ - "\5H\u01c8\nH\3H\3H\5H\u01cc\nH\3H\6H\u01cf\nH\rH\16H\u01d0\5H\u01d3\n"+ - "H\3H\5H\u01d6\nH\3I\3I\3I\3I\3I\3I\7I\u01de\nI\fI\16I\u01e1\13I\3I\3I"+ - "\3I\3I\3I\3I\3I\7I\u01ea\nI\fI\16I\u01ed\13I\3I\5I\u01f0\nI\3J\3J\3J\3"+ - "J\6J\u01f6\nJ\rJ\16J\u01f7\3J\3J\7J\u01fc\nJ\fJ\16J\u01ff\13J\3J\3J\3"+ - "K\3K\3K\3K\3K\3L\3L\3L\3L\3L\3L\3M\3M\3M\3M\3M\3N\3N\3N\3N\7N\u0217\n"+ - "N\fN\16N\u021a\13N\3N\3N\3O\3O\7O\u0220\nO\fO\16O\u0223\13O\3P\3P\3P\7"+ - "P\u0228\nP\fP\16P\u022b\13P\5P\u022d\nP\3P\3P\3Q\3Q\7Q\u0233\nQ\fQ\16"+ - "Q\u0236\13Q\3Q\3Q\6\u00b1\u00bb\u01df\u01eb\2R\4\3\6\4\b\5\n\6\f\7\16"+ - "\b\20\t\22\n\24\13\26\f\30\r\32\16\34\17\36\20 \21\"\22$\23&\24(\25*\26"+ - ",\27.\30\60\31\62\32\64\33\66\348\35:\36<\37> @!B\"D#F$H%J&L\'N(P)R*T"+ - "+V,X-Z.\\/^\60`\61b\62d\63f\64h\65j\66l\67n8p9r:t;v|?~@\u0080A\u0082"+ - "B\u0084C\u0086D\u0088E\u008aF\u008cG\u008eH\u0090I\u0092J\u0094K\u0096"+ - "L\u0098M\u009aN\u009cO\u009eP\u00a0Q\u00a2R\4\2\3\24\5\2\13\f\17\17\""+ - "\"\4\2\f\f\17\17\3\2\629\4\2NNnn\4\2ZZzz\5\2\62;CHch\3\2\63;\3\2\62;\b"+ - "\2FFHHNNffhhnn\4\2GGgg\4\2--//\4\2HHhh\4\2$$^^\4\2\f\f\61\61\3\2\f\f\t"+ - "\2WWeekknouuwwzz\5\2C\\aac|\6\2\62;C\\aac|\u0259\2\4\3\2\2\2\2\6\3\2\2"+ - "\2\2\b\3\2\2\2\2\n\3\2\2\2\2\f\3\2\2\2\2\16\3\2\2\2\2\20\3\2\2\2\2\22"+ - "\3\2\2\2\2\24\3\2\2\2\2\26\3\2\2\2\2\30\3\2\2\2\2\32\3\2\2\2\2\34\3\2"+ - "\2\2\2\36\3\2\2\2\2 \3\2\2\2\2\"\3\2\2\2\2$\3\2\2\2\2&\3\2\2\2\2(\3\2"+ - "\2\2\2*\3\2\2\2\2,\3\2\2\2\2.\3\2\2\2\2\60\3\2\2\2\2\62\3\2\2\2\2\64\3"+ - "\2\2\2\2\66\3\2\2\2\28\3\2\2\2\2:\3\2\2\2\2<\3\2\2\2\2>\3\2\2\2\2@\3\2"+ - "\2\2\2B\3\2\2\2\2D\3\2\2\2\2F\3\2\2\2\2H\3\2\2\2\2J\3\2\2\2\2L\3\2\2\2"+ - "\2N\3\2\2\2\2P\3\2\2\2\2R\3\2\2\2\2T\3\2\2\2\2V\3\2\2\2\2X\3\2\2\2\2Z"+ - "\3\2\2\2\2\\\3\2\2\2\2^\3\2\2\2\2`\3\2\2\2\2b\3\2\2\2\2d\3\2\2\2\2f\3"+ - "\2\2\2\2h\3\2\2\2\2j\3\2\2\2\2l\3\2\2\2\2n\3\2\2\2\2p\3\2\2\2\2r\3\2\2"+ - "\2\2t\3\2\2\2\2v\3\2\2\2\2x\3\2\2\2\2z\3\2\2\2\2|\3\2\2\2\2~\3\2\2\2\2"+ - 
"\u0080\3\2\2\2\2\u0082\3\2\2\2\2\u0084\3\2\2\2\2\u0086\3\2\2\2\2\u0088"+ - "\3\2\2\2\2\u008a\3\2\2\2\2\u008c\3\2\2\2\2\u008e\3\2\2\2\2\u0090\3\2\2"+ - "\2\2\u0092\3\2\2\2\2\u0094\3\2\2\2\2\u0096\3\2\2\2\2\u0098\3\2\2\2\2\u009a"+ - "\3\2\2\2\2\u009c\3\2\2\2\2\u009e\3\2\2\2\3\u00a0\3\2\2\2\3\u00a2\3\2\2"+ - "\2\4\u00a5\3\2\2\2\6\u00c0\3\2\2\2\b\u00c4\3\2\2\2\n\u00c6\3\2\2\2\f\u00c8"+ - "\3\2\2\2\16\u00ca\3\2\2\2\20\u00cc\3\2\2\2\22\u00ce\3\2\2\2\24\u00d0\3"+ - "\2\2\2\26\u00d4\3\2\2\2\30\u00d6\3\2\2\2\32\u00d8\3\2\2\2\34\u00db\3\2"+ - "\2\2\36\u00e0\3\2\2\2 \u00e6\3\2\2\2\"\u00e9\3\2\2\2$\u00ed\3\2\2\2&\u00f6"+ - "\3\2\2\2(\u00fc\3\2\2\2*\u0103\3\2\2\2,\u0107\3\2\2\2.\u010b\3\2\2\2\60"+ - "\u0111\3\2\2\2\62\u0117\3\2\2\2\64\u011c\3\2\2\2\66\u011e\3\2\2\28\u0120"+ - "\3\2\2\2:\u0122\3\2\2\2<\u0125\3\2\2\2>\u0127\3\2\2\2@\u0129\3\2\2\2B"+ - "\u012b\3\2\2\2D\u012e\3\2\2\2F\u0131\3\2\2\2H\u0135\3\2\2\2J\u0137\3\2"+ - "\2\2L\u013a\3\2\2\2N\u013c\3\2\2\2P\u013f\3\2\2\2R\u0142\3\2\2\2T\u0146"+ - "\3\2\2\2V\u0149\3\2\2\2X\u014d\3\2\2\2Z\u014f\3\2\2\2\\\u0151\3\2\2\2"+ - "^\u0153\3\2\2\2`\u0156\3\2\2\2b\u0159\3\2\2\2d\u015b\3\2\2\2f\u015d\3"+ - "\2\2\2h\u0160\3\2\2\2j\u0163\3\2\2\2l\u0166\3\2\2\2n\u016a\3\2\2\2p\u016d"+ - "\3\2\2\2r\u0170\3\2\2\2t\u0172\3\2\2\2v\u0175\3\2\2\2x\u0178\3\2\2\2z"+ - "\u017b\3\2\2\2|\u017e\3\2\2\2~\u0181\3\2\2\2\u0080\u0184\3\2\2\2\u0082"+ - "\u0187\3\2\2\2\u0084\u018a\3\2\2\2\u0086\u018e\3\2\2\2\u0088\u0192\3\2"+ - "\2\2\u008a\u0197\3\2\2\2\u008c\u01a0\3\2\2\2\u008e\u01b2\3\2\2\2\u0090"+ - "\u01bf\3\2\2\2\u0092\u01ef\3\2\2\2\u0094\u01f1\3\2\2\2\u0096\u0202\3\2"+ - "\2\2\u0098\u0207\3\2\2\2\u009a\u020d\3\2\2\2\u009c\u0212\3\2\2\2\u009e"+ - "\u021d\3\2\2\2\u00a0\u022c\3\2\2\2\u00a2\u0230\3\2\2\2\u00a4\u00a6\t\2"+ - "\2\2\u00a5\u00a4\3\2\2\2\u00a6\u00a7\3\2\2\2\u00a7\u00a5\3\2\2\2\u00a7"+ - "\u00a8\3\2\2\2\u00a8\u00a9\3\2\2\2\u00a9\u00aa\b\2\2\2\u00aa\5\3\2\2\2"+ - "\u00ab\u00ac\7\61\2\2\u00ac\u00ad\7\61\2\2\u00ad\u00b1\3\2\2\2\u00ae\u00b0"+ - "\13\2\2\2\u00af\u00ae\3\2\2\2\u00b0\u00b3\3\2\2\2\u00b1\u00b2\3\2\2\2"+ - "\u00b1\u00af\3\2\2\2\u00b2\u00b4\3\2\2\2\u00b3\u00b1\3\2\2\2\u00b4\u00c1"+ - "\t\3\2\2\u00b5\u00b6\7\61\2\2\u00b6\u00b7\7,\2\2\u00b7\u00bb\3\2\2\2\u00b8"+ - "\u00ba\13\2\2\2\u00b9\u00b8\3\2\2\2\u00ba\u00bd\3\2\2\2\u00bb\u00bc\3"+ - "\2\2\2\u00bb\u00b9\3\2\2\2\u00bc\u00be\3\2\2\2\u00bd\u00bb\3\2\2\2\u00be"+ - "\u00bf\7,\2\2\u00bf\u00c1\7\61\2\2\u00c0\u00ab\3\2\2\2\u00c0\u00b5\3\2"+ - "\2\2\u00c1\u00c2\3\2\2\2\u00c2\u00c3\b\3\2\2\u00c3\7\3\2\2\2\u00c4\u00c5"+ - "\7}\2\2\u00c5\t\3\2\2\2\u00c6\u00c7\7\177\2\2\u00c7\13\3\2\2\2\u00c8\u00c9"+ - "\7]\2\2\u00c9\r\3\2\2\2\u00ca\u00cb\7_\2\2\u00cb\17\3\2\2\2\u00cc\u00cd"+ - "\7*\2\2\u00cd\21\3\2\2\2\u00ce\u00cf\7+\2\2\u00cf\23\3\2\2\2\u00d0\u00d1"+ - "\7\60\2\2\u00d1\u00d2\3\2\2\2\u00d2\u00d3\b\n\3\2\u00d3\25\3\2\2\2\u00d4"+ - "\u00d5\7.\2\2\u00d5\27\3\2\2\2\u00d6\u00d7\7=\2\2\u00d7\31\3\2\2\2\u00d8"+ - "\u00d9\7k\2\2\u00d9\u00da\7h\2\2\u00da\33\3\2\2\2\u00db\u00dc\7g\2\2\u00dc"+ - "\u00dd\7n\2\2\u00dd\u00de\7u\2\2\u00de\u00df\7g\2\2\u00df\35\3\2\2\2\u00e0"+ - "\u00e1\7y\2\2\u00e1\u00e2\7j\2\2\u00e2\u00e3\7k\2\2\u00e3\u00e4\7n\2\2"+ - "\u00e4\u00e5\7g\2\2\u00e5\37\3\2\2\2\u00e6\u00e7\7f\2\2\u00e7\u00e8\7"+ - "q\2\2\u00e8!\3\2\2\2\u00e9\u00ea\7h\2\2\u00ea\u00eb\7q\2\2\u00eb\u00ec"+ - "\7t\2\2\u00ec#\3\2\2\2\u00ed\u00ee\7e\2\2\u00ee\u00ef\7q\2\2\u00ef\u00f0"+ - "\7p\2\2\u00f0\u00f1\7v\2\2\u00f1\u00f2\7k\2\2\u00f2\u00f3\7p\2\2\u00f3"+ - "\u00f4\7w\2\2\u00f4\u00f5\7g\2\2\u00f5%\3\2\2\2\u00f6\u00f7\7d\2\2\u00f7"+ - 
"\u00f8\7t\2\2\u00f8\u00f9\7g\2\2\u00f9\u00fa\7c\2\2\u00fa\u00fb\7m\2\2"+ - "\u00fb\'\3\2\2\2\u00fc\u00fd\7t\2\2\u00fd\u00fe\7g\2\2\u00fe\u00ff\7v"+ - "\2\2\u00ff\u0100\7w\2\2\u0100\u0101\7t\2\2\u0101\u0102\7p\2\2\u0102)\3"+ - "\2\2\2\u0103\u0104\7p\2\2\u0104\u0105\7g\2\2\u0105\u0106\7y\2\2\u0106"+ - "+\3\2\2\2\u0107\u0108\7v\2\2\u0108\u0109\7t\2\2\u0109\u010a\7{\2\2\u010a"+ - "-\3\2\2\2\u010b\u010c\7e\2\2\u010c\u010d\7c\2\2\u010d\u010e\7v\2\2\u010e"+ - "\u010f\7e\2\2\u010f\u0110\7j\2\2\u0110/\3\2\2\2\u0111\u0112\7v\2\2\u0112"+ - "\u0113\7j\2\2\u0113\u0114\7t\2\2\u0114\u0115\7q\2\2\u0115\u0116\7y\2\2"+ - "\u0116\61\3\2\2\2\u0117\u0118\7v\2\2\u0118\u0119\7j\2\2\u0119\u011a\7"+ - "k\2\2\u011a\u011b\7u\2\2\u011b\63\3\2\2\2\u011c\u011d\7#\2\2\u011d\65"+ - "\3\2\2\2\u011e\u011f\7\u0080\2\2\u011f\67\3\2\2\2\u0120\u0121\7,\2\2\u0121"+ - "9\3\2\2\2\u0122\u0123\7\61\2\2\u0123\u0124\6\35\2\2\u0124;\3\2\2\2\u0125"+ - "\u0126\7\'\2\2\u0126=\3\2\2\2\u0127\u0128\7-\2\2\u0128?\3\2\2\2\u0129"+ - "\u012a\7/\2\2\u012aA\3\2\2\2\u012b\u012c\7>\2\2\u012c\u012d\7>\2\2\u012d"+ - "C\3\2\2\2\u012e\u012f\7@\2\2\u012f\u0130\7@\2\2\u0130E\3\2\2\2\u0131\u0132"+ - "\7@\2\2\u0132\u0133\7@\2\2\u0133\u0134\7@\2\2\u0134G\3\2\2\2\u0135\u0136"+ - "\7>\2\2\u0136I\3\2\2\2\u0137\u0138\7>\2\2\u0138\u0139\7?\2\2\u0139K\3"+ - "\2\2\2\u013a\u013b\7@\2\2\u013bM\3\2\2\2\u013c\u013d\7@\2\2\u013d\u013e"+ - "\7?\2\2\u013eO\3\2\2\2\u013f\u0140\7?\2\2\u0140\u0141\7?\2\2\u0141Q\3"+ - "\2\2\2\u0142\u0143\7?\2\2\u0143\u0144\7?\2\2\u0144\u0145\7?\2\2\u0145"+ - "S\3\2\2\2\u0146\u0147\7#\2\2\u0147\u0148\7?\2\2\u0148U\3\2\2\2\u0149\u014a"+ - "\7#\2\2\u014a\u014b\7?\2\2\u014b\u014c\7?\2\2\u014cW\3\2\2\2\u014d\u014e"+ - "\7(\2\2\u014eY\3\2\2\2\u014f\u0150\7`\2\2\u0150[\3\2\2\2\u0151\u0152\7"+ - "~\2\2\u0152]\3\2\2\2\u0153\u0154\7(\2\2\u0154\u0155\7(\2\2\u0155_\3\2"+ - "\2\2\u0156\u0157\7~\2\2\u0157\u0158\7~\2\2\u0158a\3\2\2\2\u0159\u015a"+ - "\7A\2\2\u015ac\3\2\2\2\u015b\u015c\7<\2\2\u015ce\3\2\2\2\u015d\u015e\7"+ - "<\2\2\u015e\u015f\7<\2\2\u015fg\3\2\2\2\u0160\u0161\7/\2\2\u0161\u0162"+ - "\7@\2\2\u0162i\3\2\2\2\u0163\u0164\7?\2\2\u0164\u0165\7\u0080\2\2\u0165"+ - "k\3\2\2\2\u0166\u0167\7?\2\2\u0167\u0168\7?\2\2\u0168\u0169\7\u0080\2"+ - "\2\u0169m\3\2\2\2\u016a\u016b\7-\2\2\u016b\u016c\7-\2\2\u016co\3\2\2\2"+ - "\u016d\u016e\7/\2\2\u016e\u016f\7/\2\2\u016fq\3\2\2\2\u0170\u0171\7?\2"+ - "\2\u0171s\3\2\2\2\u0172\u0173\7-\2\2\u0173\u0174\7?\2\2\u0174u\3\2\2\2"+ - "\u0175\u0176\7/\2\2\u0176\u0177\7?\2\2\u0177w\3\2\2\2\u0178\u0179\7,\2"+ - "\2\u0179\u017a\7?\2\2\u017ay\3\2\2\2\u017b\u017c\7\61\2\2\u017c\u017d"+ - "\7?\2\2\u017d{\3\2\2\2\u017e\u017f\7\'\2\2\u017f\u0180\7?\2\2\u0180}\3"+ - "\2\2\2\u0181\u0182\7(\2\2\u0182\u0183\7?\2\2\u0183\177\3\2\2\2\u0184\u0185"+ - "\7`\2\2\u0185\u0186\7?\2\2\u0186\u0081\3\2\2\2\u0187\u0188\7~\2\2\u0188"+ - "\u0189\7?\2\2\u0189\u0083\3\2\2\2\u018a\u018b\7>\2\2\u018b\u018c\7>\2"+ - "\2\u018c\u018d\7?\2\2\u018d\u0085\3\2\2\2\u018e\u018f\7@\2\2\u018f\u0190"+ - "\7@\2\2\u0190\u0191\7?\2\2\u0191\u0087\3\2\2\2\u0192\u0193\7@\2\2\u0193"+ - "\u0194\7@\2\2\u0194\u0195\7@\2\2\u0195\u0196\7?\2\2\u0196\u0089\3\2\2"+ - "\2\u0197\u0199\7\62\2\2\u0198\u019a\t\4\2\2\u0199\u0198\3\2\2\2\u019a"+ - "\u019b\3\2\2\2\u019b\u0199\3\2\2\2\u019b\u019c\3\2\2\2\u019c\u019e\3\2"+ - "\2\2\u019d\u019f\t\5\2\2\u019e\u019d\3\2\2\2\u019e\u019f\3\2\2\2\u019f"+ - "\u008b\3\2\2\2\u01a0\u01a1\7\62\2\2\u01a1\u01a3\t\6\2\2\u01a2\u01a4\t"+ - "\7\2\2\u01a3\u01a2\3\2\2\2\u01a4\u01a5\3\2\2\2\u01a5\u01a3\3\2\2\2\u01a5"+ - 
"\u01a6\3\2\2\2\u01a6\u01a8\3\2\2\2\u01a7\u01a9\t\5\2\2\u01a8\u01a7\3\2"+ - "\2\2\u01a8\u01a9\3\2\2\2\u01a9\u008d\3\2\2\2\u01aa\u01b3\7\62\2\2\u01ab"+ - "\u01af\t\b\2\2\u01ac\u01ae\t\t\2\2\u01ad\u01ac\3\2\2\2\u01ae\u01b1\3\2"+ - "\2\2\u01af\u01ad\3\2\2\2\u01af\u01b0\3\2\2\2\u01b0\u01b3\3\2\2\2\u01b1"+ - "\u01af\3\2\2\2\u01b2\u01aa\3\2\2\2\u01b2\u01ab\3\2\2\2\u01b3\u01b5\3\2"+ - "\2\2\u01b4\u01b6\t\n\2\2\u01b5\u01b4\3\2\2\2\u01b5\u01b6\3\2\2\2\u01b6"+ - "\u008f\3\2\2\2\u01b7\u01c0\7\62\2\2\u01b8\u01bc\t\b\2\2\u01b9\u01bb\t"+ - "\t\2\2\u01ba\u01b9\3\2\2\2\u01bb\u01be\3\2\2\2\u01bc\u01ba\3\2\2\2\u01bc"+ - "\u01bd\3\2\2\2\u01bd\u01c0\3\2\2\2\u01be\u01bc\3\2\2\2\u01bf\u01b7\3\2"+ - "\2\2\u01bf\u01b8\3\2\2\2\u01c0\u01c7\3\2\2\2\u01c1\u01c3\5\24\n\2\u01c2"+ - "\u01c4\t\t\2\2\u01c3\u01c2\3\2\2\2\u01c4\u01c5\3\2\2\2\u01c5\u01c3\3\2"+ - "\2\2\u01c5\u01c6\3\2\2\2\u01c6\u01c8\3\2\2\2\u01c7\u01c1\3\2\2\2\u01c7"+ - "\u01c8\3\2\2\2\u01c8\u01d2\3\2\2\2\u01c9\u01cb\t\13\2\2\u01ca\u01cc\t"+ - "\f\2\2\u01cb\u01ca\3\2\2\2\u01cb\u01cc\3\2\2\2\u01cc\u01ce\3\2\2\2\u01cd"+ - "\u01cf\t\t\2\2\u01ce\u01cd\3\2\2\2\u01cf\u01d0\3\2\2\2\u01d0\u01ce\3\2"+ - "\2\2\u01d0\u01d1\3\2\2\2\u01d1\u01d3\3\2\2\2\u01d2\u01c9\3\2\2\2\u01d2"+ - "\u01d3\3\2\2\2\u01d3\u01d5\3\2\2\2\u01d4\u01d6\t\r\2\2\u01d5\u01d4\3\2"+ - "\2\2\u01d5\u01d6\3\2\2\2\u01d6\u0091\3\2\2\2\u01d7\u01df\7$\2\2\u01d8"+ - "\u01d9\7^\2\2\u01d9\u01de\7$\2\2\u01da\u01db\7^\2\2\u01db\u01de\7^\2\2"+ - "\u01dc\u01de\n\16\2\2\u01dd\u01d8\3\2\2\2\u01dd\u01da\3\2\2\2\u01dd\u01dc"+ - "\3\2\2\2\u01de\u01e1\3\2\2\2\u01df\u01e0\3\2\2\2\u01df\u01dd\3\2\2\2\u01e0"+ - "\u01e2\3\2\2\2\u01e1\u01df\3\2\2\2\u01e2\u01f0\7$\2\2\u01e3\u01eb\7)\2"+ - "\2\u01e4\u01e5\7^\2\2\u01e5\u01ea\7)\2\2\u01e6\u01e7\7^\2\2\u01e7\u01ea"+ - "\7^\2\2\u01e8\u01ea\n\16\2\2\u01e9\u01e4\3\2\2\2\u01e9\u01e6\3\2\2\2\u01e9"+ - "\u01e8\3\2\2\2\u01ea\u01ed\3\2\2\2\u01eb\u01ec\3\2\2\2\u01eb\u01e9\3\2"+ - "\2\2\u01ec\u01ee\3\2\2\2\u01ed\u01eb\3\2\2\2\u01ee\u01f0\7)\2\2\u01ef"+ - "\u01d7\3\2\2\2\u01ef\u01e3\3\2\2\2\u01f0\u0093\3\2\2\2\u01f1\u01f5\7\61"+ - "\2\2\u01f2\u01f6\n\17\2\2\u01f3\u01f4\7^\2\2\u01f4\u01f6\n\20\2\2\u01f5"+ - "\u01f2\3\2\2\2\u01f5\u01f3\3\2\2\2\u01f6\u01f7\3\2\2\2\u01f7\u01f5\3\2"+ - "\2\2\u01f7\u01f8\3\2\2\2\u01f8\u01f9\3\2\2\2\u01f9\u01fd\7\61\2\2\u01fa"+ - "\u01fc\t\21\2\2\u01fb\u01fa\3\2\2\2\u01fc\u01ff\3\2\2\2\u01fd\u01fb\3"+ - "\2\2\2\u01fd\u01fe\3\2\2\2\u01fe\u0200\3\2\2\2\u01ff\u01fd\3\2\2\2\u0200"+ - "\u0201\6J\3\2\u0201\u0095\3\2\2\2\u0202\u0203\7v\2\2\u0203\u0204\7t\2"+ - "\2\u0204\u0205\7w\2\2\u0205\u0206\7g\2\2\u0206\u0097\3\2\2\2\u0207\u0208"+ - "\7h\2\2\u0208\u0209\7c\2\2\u0209\u020a\7n\2\2\u020a\u020b\7u\2\2\u020b"+ - "\u020c\7g\2\2\u020c\u0099\3\2\2\2\u020d\u020e\7p\2\2\u020e\u020f\7w\2"+ - "\2\u020f\u0210\7n\2\2\u0210\u0211\7n\2\2\u0211\u009b\3\2\2\2\u0212\u0218"+ - "\5\u009eO\2\u0213\u0214\5\24\n\2\u0214\u0215\5\u009eO\2\u0215\u0217\3"+ - "\2\2\2\u0216\u0213\3\2\2\2\u0217\u021a\3\2\2\2\u0218\u0216\3\2\2\2\u0218"+ - "\u0219\3\2\2\2\u0219\u021b\3\2\2\2\u021a\u0218\3\2\2\2\u021b\u021c\6N"+ - "\4\2\u021c\u009d\3\2\2\2\u021d\u0221\t\22\2\2\u021e\u0220\t\23\2\2\u021f"+ - "\u021e\3\2\2\2\u0220\u0223\3\2\2\2\u0221\u021f\3\2\2\2\u0221\u0222\3\2"+ - "\2\2\u0222\u009f\3\2\2\2\u0223\u0221\3\2\2\2\u0224\u022d\7\62\2\2\u0225"+ - "\u0229\t\b\2\2\u0226\u0228\t\t\2\2\u0227\u0226\3\2\2\2\u0228\u022b\3\2"+ - "\2\2\u0229\u0227\3\2\2\2\u0229\u022a\3\2\2\2\u022a\u022d\3\2\2\2\u022b"+ - "\u0229\3\2\2\2\u022c\u0224\3\2\2\2\u022c\u0225\3\2\2\2\u022d\u022e\3\2"+ - 
"\2\2\u022e\u022f\bP\4\2\u022f\u00a1\3\2\2\2\u0230\u0234\t\22\2\2\u0231"+ - "\u0233\t\23\2\2\u0232\u0231\3\2\2\2\u0233\u0236\3\2\2\2\u0234\u0232\3"+ - "\2\2\2\u0234\u0235\3\2\2\2\u0235\u0237\3\2\2\2\u0236\u0234\3\2\2\2\u0237"+ - "\u0238\bQ\4\2\u0238\u00a3\3\2\2\2$\2\3\u00a7\u00b1\u00bb\u00c0\u019b\u019e"+ - "\u01a5\u01a8\u01af\u01b2\u01b5\u01bc\u01bf\u01c5\u01c7\u01cb\u01d0\u01d2"+ - "\u01d5\u01dd\u01df\u01e9\u01eb\u01ef\u01f5\u01f7\u01fd\u0218\u0221\u0229"+ - "\u022c\u0234\5\b\2\2\4\3\2\4\2\2"; + "I\tI\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\3\2\6"+ + "\2\u00aa\n\2\r\2\16\2\u00ab\3\2\3\2\3\3\3\3\3\3\3\3\7\3\u00b4\n\3\f\3"+ + "\16\3\u00b7\13\3\3\3\3\3\3\3\3\3\3\3\7\3\u00be\n\3\f\3\16\3\u00c1\13\3"+ + "\3\3\3\3\5\3\u00c5\n\3\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7\3\b\3\b"+ + "\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3\r\3\r\3\16\3\16\3\16"+ + "\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\20\3\21\3\21\3\21"+ + "\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\24"+ + "\3\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\26\3\26"+ + "\3\26\3\26\3\27\3\27\3\27\3\27\3\30\3\30\3\30\3\30\3\30\3\30\3\31\3\31"+ + "\3\31\3\31\3\31\3\31\3\32\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33"+ + "\3\33\3\33\3\33\3\33\3\33\3\33\3\34\3\34\3\35\3\35\3\36\3\36\3\37\3\37"+ + "\3\37\3 \3 \3!\3!\3\"\3\"\3#\3#\3#\3$\3$\3$\3%\3%\3%\3%\3&\3&\3\'\3\'"+ + "\3\'\3(\3(\3)\3)\3)\3*\3*\3*\3+\3+\3+\3+\3,\3,\3,\3-\3-\3-\3-\3.\3.\3"+ + "/\3/\3\60\3\60\3\61\3\61\3\61\3\62\3\62\3\62\3\63\3\63\3\64\3\64\3\65"+ + "\3\65\3\65\3\66\3\66\3\66\3\67\3\67\3\67\38\38\38\38\39\39\39\3:\3:\3"+ + ":\3;\3;\3<\3<\3<\3=\3=\3=\3>\3>\3>\3?\3?\3?\3@\3@\3@\3A\3A\3A\3B\3B\3"+ + "B\3C\3C\3C\3D\3D\3D\3D\3E\3E\3E\3E\3F\3F\3F\3F\3F\3G\3G\6G\u01ac\nG\r"+ + "G\16G\u01ad\3G\5G\u01b1\nG\3H\3H\3H\6H\u01b6\nH\rH\16H\u01b7\3H\5H\u01bb"+ + "\nH\3I\3I\3I\7I\u01c0\nI\fI\16I\u01c3\13I\5I\u01c5\nI\3I\5I\u01c8\nI\3"+ + "J\3J\3J\7J\u01cd\nJ\fJ\16J\u01d0\13J\5J\u01d2\nJ\3J\3J\6J\u01d6\nJ\rJ"+ + "\16J\u01d7\5J\u01da\nJ\3J\3J\5J\u01de\nJ\3J\6J\u01e1\nJ\rJ\16J\u01e2\5"+ + "J\u01e5\nJ\3J\5J\u01e8\nJ\3K\3K\3K\3K\3K\3K\7K\u01f0\nK\fK\16K\u01f3\13"+ + "K\3K\3K\3K\3K\3K\3K\3K\7K\u01fc\nK\fK\16K\u01ff\13K\3K\5K\u0202\nK\3L"+ + "\3L\3L\3L\6L\u0208\nL\rL\16L\u0209\3L\3L\7L\u020e\nL\fL\16L\u0211\13L"+ + "\3L\3L\3M\3M\3M\3M\3M\3N\3N\3N\3N\3N\3N\3O\3O\3O\3O\3O\3P\3P\3P\3P\7P"+ + "\u0229\nP\fP\16P\u022c\13P\3P\3P\3Q\3Q\7Q\u0232\nQ\fQ\16Q\u0235\13Q\3"+ + "R\3R\3R\7R\u023a\nR\fR\16R\u023d\13R\5R\u023f\nR\3R\3R\3S\3S\7S\u0245"+ + "\nS\fS\16S\u0248\13S\3S\3S\6\u00b5\u00bf\u01f1\u01fd\2T\4\3\6\4\b\5\n"+ + "\6\f\7\16\b\20\t\22\n\24\13\26\f\30\r\32\16\34\17\36\20 \21\"\22$\23&"+ + "\24(\25*\26,\27.\30\60\31\62\32\64\33\66\348\35:\36<\37> @!B\"D#F$H%J"+ + "&L\'N(P)R*T+V,X-Z.\\/^\60`\61b\62d\63f\64h\65j\66l\67n8p9r:t;v|?"+ + "~@\u0080A\u0082B\u0084C\u0086D\u0088E\u008aF\u008cG\u008eH\u0090I\u0092"+ + "J\u0094K\u0096L\u0098M\u009aN\u009cO\u009eP\u00a0Q\u00a2R\u00a4S\u00a6"+ + "T\4\2\3\24\5\2\13\f\17\17\"\"\4\2\f\f\17\17\3\2\629\4\2NNnn\4\2ZZzz\5"+ + "\2\62;CHch\3\2\63;\3\2\62;\b\2FFHHNNffhhnn\4\2GGgg\4\2--//\4\2HHhh\4\2"+ + "$$^^\4\2\f\f\61\61\3\2\f\f\t\2WWeekknouuwwzz\5\2C\\aac|\6\2\62;C\\aac"+ + "|\u026b\2\4\3\2\2\2\2\6\3\2\2\2\2\b\3\2\2\2\2\n\3\2\2\2\2\f\3\2\2\2\2"+ + "\16\3\2\2\2\2\20\3\2\2\2\2\22\3\2\2\2\2\24\3\2\2\2\2\26\3\2\2\2\2\30\3"+ + "\2\2\2\2\32\3\2\2\2\2\34\3\2\2\2\2\36\3\2\2\2\2 \3\2\2\2\2\"\3\2\2\2\2"+ + "$\3\2\2\2\2&\3\2\2\2\2(\3\2\2\2\2*\3\2\2\2\2,\3\2\2\2\2.\3\2\2\2\2\60"+ + 
"\3\2\2\2\2\62\3\2\2\2\2\64\3\2\2\2\2\66\3\2\2\2\28\3\2\2\2\2:\3\2\2\2"+ + "\2<\3\2\2\2\2>\3\2\2\2\2@\3\2\2\2\2B\3\2\2\2\2D\3\2\2\2\2F\3\2\2\2\2H"+ + "\3\2\2\2\2J\3\2\2\2\2L\3\2\2\2\2N\3\2\2\2\2P\3\2\2\2\2R\3\2\2\2\2T\3\2"+ + "\2\2\2V\3\2\2\2\2X\3\2\2\2\2Z\3\2\2\2\2\\\3\2\2\2\2^\3\2\2\2\2`\3\2\2"+ + "\2\2b\3\2\2\2\2d\3\2\2\2\2f\3\2\2\2\2h\3\2\2\2\2j\3\2\2\2\2l\3\2\2\2\2"+ + "n\3\2\2\2\2p\3\2\2\2\2r\3\2\2\2\2t\3\2\2\2\2v\3\2\2\2\2x\3\2\2\2\2z\3"+ + "\2\2\2\2|\3\2\2\2\2~\3\2\2\2\2\u0080\3\2\2\2\2\u0082\3\2\2\2\2\u0084\3"+ + "\2\2\2\2\u0086\3\2\2\2\2\u0088\3\2\2\2\2\u008a\3\2\2\2\2\u008c\3\2\2\2"+ + "\2\u008e\3\2\2\2\2\u0090\3\2\2\2\2\u0092\3\2\2\2\2\u0094\3\2\2\2\2\u0096"+ + "\3\2\2\2\2\u0098\3\2\2\2\2\u009a\3\2\2\2\2\u009c\3\2\2\2\2\u009e\3\2\2"+ + "\2\2\u00a0\3\2\2\2\2\u00a2\3\2\2\2\3\u00a4\3\2\2\2\3\u00a6\3\2\2\2\4\u00a9"+ + "\3\2\2\2\6\u00c4\3\2\2\2\b\u00c8\3\2\2\2\n\u00ca\3\2\2\2\f\u00cc\3\2\2"+ + "\2\16\u00ce\3\2\2\2\20\u00d0\3\2\2\2\22\u00d2\3\2\2\2\24\u00d4\3\2\2\2"+ + "\26\u00d8\3\2\2\2\30\u00da\3\2\2\2\32\u00dc\3\2\2\2\34\u00df\3\2\2\2\36"+ + "\u00e2\3\2\2\2 \u00e7\3\2\2\2\"\u00ed\3\2\2\2$\u00f0\3\2\2\2&\u00f4\3"+ + "\2\2\2(\u00fd\3\2\2\2*\u0103\3\2\2\2,\u010a\3\2\2\2.\u010e\3\2\2\2\60"+ + "\u0112\3\2\2\2\62\u0118\3\2\2\2\64\u011e\3\2\2\2\66\u0123\3\2\2\28\u012e"+ + "\3\2\2\2:\u0130\3\2\2\2<\u0132\3\2\2\2>\u0134\3\2\2\2@\u0137\3\2\2\2B"+ + "\u0139\3\2\2\2D\u013b\3\2\2\2F\u013d\3\2\2\2H\u0140\3\2\2\2J\u0143\3\2"+ + "\2\2L\u0147\3\2\2\2N\u0149\3\2\2\2P\u014c\3\2\2\2R\u014e\3\2\2\2T\u0151"+ + "\3\2\2\2V\u0154\3\2\2\2X\u0158\3\2\2\2Z\u015b\3\2\2\2\\\u015f\3\2\2\2"+ + "^\u0161\3\2\2\2`\u0163\3\2\2\2b\u0165\3\2\2\2d\u0168\3\2\2\2f\u016b\3"+ + "\2\2\2h\u016d\3\2\2\2j\u016f\3\2\2\2l\u0172\3\2\2\2n\u0175\3\2\2\2p\u0178"+ + "\3\2\2\2r\u017c\3\2\2\2t\u017f\3\2\2\2v\u0182\3\2\2\2x\u0184\3\2\2\2z"+ + "\u0187\3\2\2\2|\u018a\3\2\2\2~\u018d\3\2\2\2\u0080\u0190\3\2\2\2\u0082"+ + "\u0193\3\2\2\2\u0084\u0196\3\2\2\2\u0086\u0199\3\2\2\2\u0088\u019c\3\2"+ + "\2\2\u008a\u01a0\3\2\2\2\u008c\u01a4\3\2\2\2\u008e\u01a9\3\2\2\2\u0090"+ + "\u01b2\3\2\2\2\u0092\u01c4\3\2\2\2\u0094\u01d1\3\2\2\2\u0096\u0201\3\2"+ + "\2\2\u0098\u0203\3\2\2\2\u009a\u0214\3\2\2\2\u009c\u0219\3\2\2\2\u009e"+ + "\u021f\3\2\2\2\u00a0\u0224\3\2\2\2\u00a2\u022f\3\2\2\2\u00a4\u023e\3\2"+ + "\2\2\u00a6\u0242\3\2\2\2\u00a8\u00aa\t\2\2\2\u00a9\u00a8\3\2\2\2\u00aa"+ + "\u00ab\3\2\2\2\u00ab\u00a9\3\2\2\2\u00ab\u00ac\3\2\2\2\u00ac\u00ad\3\2"+ + "\2\2\u00ad\u00ae\b\2\2\2\u00ae\5\3\2\2\2\u00af\u00b0\7\61\2\2\u00b0\u00b1"+ + "\7\61\2\2\u00b1\u00b5\3\2\2\2\u00b2\u00b4\13\2\2\2\u00b3\u00b2\3\2\2\2"+ + "\u00b4\u00b7\3\2\2\2\u00b5\u00b6\3\2\2\2\u00b5\u00b3\3\2\2\2\u00b6\u00b8"+ + "\3\2\2\2\u00b7\u00b5\3\2\2\2\u00b8\u00c5\t\3\2\2\u00b9\u00ba\7\61\2\2"+ + "\u00ba\u00bb\7,\2\2\u00bb\u00bf\3\2\2\2\u00bc\u00be\13\2\2\2\u00bd\u00bc"+ + "\3\2\2\2\u00be\u00c1\3\2\2\2\u00bf\u00c0\3\2\2\2\u00bf\u00bd\3\2\2\2\u00c0"+ + "\u00c2\3\2\2\2\u00c1\u00bf\3\2\2\2\u00c2\u00c3\7,\2\2\u00c3\u00c5\7\61"+ + "\2\2\u00c4\u00af\3\2\2\2\u00c4\u00b9\3\2\2\2\u00c5\u00c6\3\2\2\2\u00c6"+ + "\u00c7\b\3\2\2\u00c7\7\3\2\2\2\u00c8\u00c9\7}\2\2\u00c9\t\3\2\2\2\u00ca"+ + "\u00cb\7\177\2\2\u00cb\13\3\2\2\2\u00cc\u00cd\7]\2\2\u00cd\r\3\2\2\2\u00ce"+ + "\u00cf\7_\2\2\u00cf\17\3\2\2\2\u00d0\u00d1\7*\2\2\u00d1\21\3\2\2\2\u00d2"+ + "\u00d3\7+\2\2\u00d3\23\3\2\2\2\u00d4\u00d5\7\60\2\2\u00d5\u00d6\3\2\2"+ + "\2\u00d6\u00d7\b\n\3\2\u00d7\25\3\2\2\2\u00d8\u00d9\7.\2\2\u00d9\27\3"+ + "\2\2\2\u00da\u00db\7=\2\2\u00db\31\3\2\2\2\u00dc\u00dd\7k\2\2\u00dd\u00de"+ + 
"\7h\2\2\u00de\33\3\2\2\2\u00df\u00e0\7k\2\2\u00e0\u00e1\7p\2\2\u00e1\35"+ + "\3\2\2\2\u00e2\u00e3\7g\2\2\u00e3\u00e4\7n\2\2\u00e4\u00e5\7u\2\2\u00e5"+ + "\u00e6\7g\2\2\u00e6\37\3\2\2\2\u00e7\u00e8\7y\2\2\u00e8\u00e9\7j\2\2\u00e9"+ + "\u00ea\7k\2\2\u00ea\u00eb\7n\2\2\u00eb\u00ec\7g\2\2\u00ec!\3\2\2\2\u00ed"+ + "\u00ee\7f\2\2\u00ee\u00ef\7q\2\2\u00ef#\3\2\2\2\u00f0\u00f1\7h\2\2\u00f1"+ + "\u00f2\7q\2\2\u00f2\u00f3\7t\2\2\u00f3%\3\2\2\2\u00f4\u00f5\7e\2\2\u00f5"+ + "\u00f6\7q\2\2\u00f6\u00f7\7p\2\2\u00f7\u00f8\7v\2\2\u00f8\u00f9\7k\2\2"+ + "\u00f9\u00fa\7p\2\2\u00fa\u00fb\7w\2\2\u00fb\u00fc\7g\2\2\u00fc\'\3\2"+ + "\2\2\u00fd\u00fe\7d\2\2\u00fe\u00ff\7t\2\2\u00ff\u0100\7g\2\2\u0100\u0101"+ + "\7c\2\2\u0101\u0102\7m\2\2\u0102)\3\2\2\2\u0103\u0104\7t\2\2\u0104\u0105"+ + "\7g\2\2\u0105\u0106\7v\2\2\u0106\u0107\7w\2\2\u0107\u0108\7t\2\2\u0108"+ + "\u0109\7p\2\2\u0109+\3\2\2\2\u010a\u010b\7p\2\2\u010b\u010c\7g\2\2\u010c"+ + "\u010d\7y\2\2\u010d-\3\2\2\2\u010e\u010f\7v\2\2\u010f\u0110\7t\2\2\u0110"+ + "\u0111\7{\2\2\u0111/\3\2\2\2\u0112\u0113\7e\2\2\u0113\u0114\7c\2\2\u0114"+ + "\u0115\7v\2\2\u0115\u0116\7e\2\2\u0116\u0117\7j\2\2\u0117\61\3\2\2\2\u0118"+ + "\u0119\7v\2\2\u0119\u011a\7j\2\2\u011a\u011b\7t\2\2\u011b\u011c\7q\2\2"+ + "\u011c\u011d\7y\2\2\u011d\63\3\2\2\2\u011e\u011f\7v\2\2\u011f\u0120\7"+ + "j\2\2\u0120\u0121\7k\2\2\u0121\u0122\7u\2\2\u0122\65\3\2\2\2\u0123\u0124"+ + "\7k\2\2\u0124\u0125\7p\2\2\u0125\u0126\7u\2\2\u0126\u0127\7v\2\2\u0127"+ + "\u0128\7c\2\2\u0128\u0129\7p\2\2\u0129\u012a\7e\2\2\u012a\u012b\7g\2\2"+ + "\u012b\u012c\7q\2\2\u012c\u012d\7h\2\2\u012d\67\3\2\2\2\u012e\u012f\7"+ + "#\2\2\u012f9\3\2\2\2\u0130\u0131\7\u0080\2\2\u0131;\3\2\2\2\u0132\u0133"+ + "\7,\2\2\u0133=\3\2\2\2\u0134\u0135\7\61\2\2\u0135\u0136\6\37\2\2\u0136"+ + "?\3\2\2\2\u0137\u0138\7\'\2\2\u0138A\3\2\2\2\u0139\u013a\7-\2\2\u013a"+ + "C\3\2\2\2\u013b\u013c\7/\2\2\u013cE\3\2\2\2\u013d\u013e\7>\2\2\u013e\u013f"+ + "\7>\2\2\u013fG\3\2\2\2\u0140\u0141\7@\2\2\u0141\u0142\7@\2\2\u0142I\3"+ + "\2\2\2\u0143\u0144\7@\2\2\u0144\u0145\7@\2\2\u0145\u0146\7@\2\2\u0146"+ + "K\3\2\2\2\u0147\u0148\7>\2\2\u0148M\3\2\2\2\u0149\u014a\7>\2\2\u014a\u014b"+ + "\7?\2\2\u014bO\3\2\2\2\u014c\u014d\7@\2\2\u014dQ\3\2\2\2\u014e\u014f\7"+ + "@\2\2\u014f\u0150\7?\2\2\u0150S\3\2\2\2\u0151\u0152\7?\2\2\u0152\u0153"+ + "\7?\2\2\u0153U\3\2\2\2\u0154\u0155\7?\2\2\u0155\u0156\7?\2\2\u0156\u0157"+ + "\7?\2\2\u0157W\3\2\2\2\u0158\u0159\7#\2\2\u0159\u015a\7?\2\2\u015aY\3"+ + "\2\2\2\u015b\u015c\7#\2\2\u015c\u015d\7?\2\2\u015d\u015e\7?\2\2\u015e"+ + "[\3\2\2\2\u015f\u0160\7(\2\2\u0160]\3\2\2\2\u0161\u0162\7`\2\2\u0162_"+ + "\3\2\2\2\u0163\u0164\7~\2\2\u0164a\3\2\2\2\u0165\u0166\7(\2\2\u0166\u0167"+ + "\7(\2\2\u0167c\3\2\2\2\u0168\u0169\7~\2\2\u0169\u016a\7~\2\2\u016ae\3"+ + "\2\2\2\u016b\u016c\7A\2\2\u016cg\3\2\2\2\u016d\u016e\7<\2\2\u016ei\3\2"+ + "\2\2\u016f\u0170\7<\2\2\u0170\u0171\7<\2\2\u0171k\3\2\2\2\u0172\u0173"+ + "\7/\2\2\u0173\u0174\7@\2\2\u0174m\3\2\2\2\u0175\u0176\7?\2\2\u0176\u0177"+ + "\7\u0080\2\2\u0177o\3\2\2\2\u0178\u0179\7?\2\2\u0179\u017a\7?\2\2\u017a"+ + "\u017b\7\u0080\2\2\u017bq\3\2\2\2\u017c\u017d\7-\2\2\u017d\u017e\7-\2"+ + "\2\u017es\3\2\2\2\u017f\u0180\7/\2\2\u0180\u0181\7/\2\2\u0181u\3\2\2\2"+ + "\u0182\u0183\7?\2\2\u0183w\3\2\2\2\u0184\u0185\7-\2\2\u0185\u0186\7?\2"+ + "\2\u0186y\3\2\2\2\u0187\u0188\7/\2\2\u0188\u0189\7?\2\2\u0189{\3\2\2\2"+ + "\u018a\u018b\7,\2\2\u018b\u018c\7?\2\2\u018c}\3\2\2\2\u018d\u018e\7\61"+ + "\2\2\u018e\u018f\7?\2\2\u018f\177\3\2\2\2\u0190\u0191\7\'\2\2\u0191\u0192"+ + 
"\7?\2\2\u0192\u0081\3\2\2\2\u0193\u0194\7(\2\2\u0194\u0195\7?\2\2\u0195"+ + "\u0083\3\2\2\2\u0196\u0197\7`\2\2\u0197\u0198\7?\2\2\u0198\u0085\3\2\2"+ + "\2\u0199\u019a\7~\2\2\u019a\u019b\7?\2\2\u019b\u0087\3\2\2\2\u019c\u019d"+ + "\7>\2\2\u019d\u019e\7>\2\2\u019e\u019f\7?\2\2\u019f\u0089\3\2\2\2\u01a0"+ + "\u01a1\7@\2\2\u01a1\u01a2\7@\2\2\u01a2\u01a3\7?\2\2\u01a3\u008b\3\2\2"+ + "\2\u01a4\u01a5\7@\2\2\u01a5\u01a6\7@\2\2\u01a6\u01a7\7@\2\2\u01a7\u01a8"+ + "\7?\2\2\u01a8\u008d\3\2\2\2\u01a9\u01ab\7\62\2\2\u01aa\u01ac\t\4\2\2\u01ab"+ + "\u01aa\3\2\2\2\u01ac\u01ad\3\2\2\2\u01ad\u01ab\3\2\2\2\u01ad\u01ae\3\2"+ + "\2\2\u01ae\u01b0\3\2\2\2\u01af\u01b1\t\5\2\2\u01b0\u01af\3\2\2\2\u01b0"+ + "\u01b1\3\2\2\2\u01b1\u008f\3\2\2\2\u01b2\u01b3\7\62\2\2\u01b3\u01b5\t"+ + "\6\2\2\u01b4\u01b6\t\7\2\2\u01b5\u01b4\3\2\2\2\u01b6\u01b7\3\2\2\2\u01b7"+ + "\u01b5\3\2\2\2\u01b7\u01b8\3\2\2\2\u01b8\u01ba\3\2\2\2\u01b9\u01bb\t\5"+ + "\2\2\u01ba\u01b9\3\2\2\2\u01ba\u01bb\3\2\2\2\u01bb\u0091\3\2\2\2\u01bc"+ + "\u01c5\7\62\2\2\u01bd\u01c1\t\b\2\2\u01be\u01c0\t\t\2\2\u01bf\u01be\3"+ + "\2\2\2\u01c0\u01c3\3\2\2\2\u01c1\u01bf\3\2\2\2\u01c1\u01c2\3\2\2\2\u01c2"+ + "\u01c5\3\2\2\2\u01c3\u01c1\3\2\2\2\u01c4\u01bc\3\2\2\2\u01c4\u01bd\3\2"+ + "\2\2\u01c5\u01c7\3\2\2\2\u01c6\u01c8\t\n\2\2\u01c7\u01c6\3\2\2\2\u01c7"+ + "\u01c8\3\2\2\2\u01c8\u0093\3\2\2\2\u01c9\u01d2\7\62\2\2\u01ca\u01ce\t"+ + "\b\2\2\u01cb\u01cd\t\t\2\2\u01cc\u01cb\3\2\2\2\u01cd\u01d0\3\2\2\2\u01ce"+ + "\u01cc\3\2\2\2\u01ce\u01cf\3\2\2\2\u01cf\u01d2\3\2\2\2\u01d0\u01ce\3\2"+ + "\2\2\u01d1\u01c9\3\2\2\2\u01d1\u01ca\3\2\2\2\u01d2\u01d9\3\2\2\2\u01d3"+ + "\u01d5\5\24\n\2\u01d4\u01d6\t\t\2\2\u01d5\u01d4\3\2\2\2\u01d6\u01d7\3"+ + "\2\2\2\u01d7\u01d5\3\2\2\2\u01d7\u01d8\3\2\2\2\u01d8\u01da\3\2\2\2\u01d9"+ + "\u01d3\3\2\2\2\u01d9\u01da\3\2\2\2\u01da\u01e4\3\2\2\2\u01db\u01dd\t\13"+ + "\2\2\u01dc\u01de\t\f\2\2\u01dd\u01dc\3\2\2\2\u01dd\u01de\3\2\2\2\u01de"+ + "\u01e0\3\2\2\2\u01df\u01e1\t\t\2\2\u01e0\u01df\3\2\2\2\u01e1\u01e2\3\2"+ + "\2\2\u01e2\u01e0\3\2\2\2\u01e2\u01e3\3\2\2\2\u01e3\u01e5\3\2\2\2\u01e4"+ + "\u01db\3\2\2\2\u01e4\u01e5\3\2\2\2\u01e5\u01e7\3\2\2\2\u01e6\u01e8\t\r"+ + "\2\2\u01e7\u01e6\3\2\2\2\u01e7\u01e8\3\2\2\2\u01e8\u0095\3\2\2\2\u01e9"+ + "\u01f1\7$\2\2\u01ea\u01eb\7^\2\2\u01eb\u01f0\7$\2\2\u01ec\u01ed\7^\2\2"+ + "\u01ed\u01f0\7^\2\2\u01ee\u01f0\n\16\2\2\u01ef\u01ea\3\2\2\2\u01ef\u01ec"+ + "\3\2\2\2\u01ef\u01ee\3\2\2\2\u01f0\u01f3\3\2\2\2\u01f1\u01f2\3\2\2\2\u01f1"+ + "\u01ef\3\2\2\2\u01f2\u01f4\3\2\2\2\u01f3\u01f1\3\2\2\2\u01f4\u0202\7$"+ + "\2\2\u01f5\u01fd\7)\2\2\u01f6\u01f7\7^\2\2\u01f7\u01fc\7)\2\2\u01f8\u01f9"+ + "\7^\2\2\u01f9\u01fc\7^\2\2\u01fa\u01fc\n\16\2\2\u01fb\u01f6\3\2\2\2\u01fb"+ + "\u01f8\3\2\2\2\u01fb\u01fa\3\2\2\2\u01fc\u01ff\3\2\2\2\u01fd\u01fe\3\2"+ + "\2\2\u01fd\u01fb\3\2\2\2\u01fe\u0200\3\2\2\2\u01ff\u01fd\3\2\2\2\u0200"+ + "\u0202\7)\2\2\u0201\u01e9\3\2\2\2\u0201\u01f5\3\2\2\2\u0202\u0097\3\2"+ + "\2\2\u0203\u0207\7\61\2\2\u0204\u0208\n\17\2\2\u0205\u0206\7^\2\2\u0206"+ + "\u0208\n\20\2\2\u0207\u0204\3\2\2\2\u0207\u0205\3\2\2\2\u0208\u0209\3"+ + "\2\2\2\u0209\u0207\3\2\2\2\u0209\u020a\3\2\2\2\u020a\u020b\3\2\2\2\u020b"+ + "\u020f\7\61\2\2\u020c\u020e\t\21\2\2\u020d\u020c\3\2\2\2\u020e\u0211\3"+ + "\2\2\2\u020f\u020d\3\2\2\2\u020f\u0210\3\2\2\2\u0210\u0212\3\2\2\2\u0211"+ + "\u020f\3\2\2\2\u0212\u0213\6L\3\2\u0213\u0099\3\2\2\2\u0214\u0215\7v\2"+ + "\2\u0215\u0216\7t\2\2\u0216\u0217\7w\2\2\u0217\u0218\7g\2\2\u0218\u009b"+ + "\3\2\2\2\u0219\u021a\7h\2\2\u021a\u021b\7c\2\2\u021b\u021c\7n\2\2\u021c"+ + 
"\u021d\7u\2\2\u021d\u021e\7g\2\2\u021e\u009d\3\2\2\2\u021f\u0220\7p\2"+ + "\2\u0220\u0221\7w\2\2\u0221\u0222\7n\2\2\u0222\u0223\7n\2\2\u0223\u009f"+ + "\3\2\2\2\u0224\u022a\5\u00a2Q\2\u0225\u0226\5\24\n\2\u0226\u0227\5\u00a2"+ + "Q\2\u0227\u0229\3\2\2\2\u0228\u0225\3\2\2\2\u0229\u022c\3\2\2\2\u022a"+ + "\u0228\3\2\2\2\u022a\u022b\3\2\2\2\u022b\u022d\3\2\2\2\u022c\u022a\3\2"+ + "\2\2\u022d\u022e\6P\4\2\u022e\u00a1\3\2\2\2\u022f\u0233\t\22\2\2\u0230"+ + "\u0232\t\23\2\2\u0231\u0230\3\2\2\2\u0232\u0235\3\2\2\2\u0233\u0231\3"+ + "\2\2\2\u0233\u0234\3\2\2\2\u0234\u00a3\3\2\2\2\u0235\u0233\3\2\2\2\u0236"+ + "\u023f\7\62\2\2\u0237\u023b\t\b\2\2\u0238\u023a\t\t\2\2\u0239\u0238\3"+ + "\2\2\2\u023a\u023d\3\2\2\2\u023b\u0239\3\2\2\2\u023b\u023c\3\2\2\2\u023c"+ + "\u023f\3\2\2\2\u023d\u023b\3\2\2\2\u023e\u0236\3\2\2\2\u023e\u0237\3\2"+ + "\2\2\u023f\u0240\3\2\2\2\u0240\u0241\bR\4\2\u0241\u00a5\3\2\2\2\u0242"+ + "\u0246\t\22\2\2\u0243\u0245\t\23\2\2\u0244\u0243\3\2\2\2\u0245\u0248\3"+ + "\2\2\2\u0246\u0244\3\2\2\2\u0246\u0247\3\2\2\2\u0247\u0249\3\2\2\2\u0248"+ + "\u0246\3\2\2\2\u0249\u024a\bS\4\2\u024a\u00a7\3\2\2\2$\2\3\u00ab\u00b5"+ + "\u00bf\u00c4\u01ad\u01b0\u01b7\u01ba\u01c1\u01c4\u01c7\u01ce\u01d1\u01d7"+ + "\u01d9\u01dd\u01e2\u01e4\u01e7\u01ef\u01f1\u01fb\u01fd\u0201\u0207\u0209"+ + "\u020f\u022a\u0233\u023b\u023e\u0246\5\b\2\2\4\3\2\4\2\2"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java index f6ace39be63..933a5f35dcd 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java @@ -18,15 +18,16 @@ class PainlessParser extends Parser { new PredictionContextCache(); public static final int WS=1, COMMENT=2, LBRACK=3, RBRACK=4, LBRACE=5, RBRACE=6, LP=7, RP=8, DOT=9, - COMMA=10, SEMICOLON=11, IF=12, ELSE=13, WHILE=14, DO=15, FOR=16, CONTINUE=17, - BREAK=18, RETURN=19, NEW=20, TRY=21, CATCH=22, THROW=23, THIS=24, BOOLNOT=25, - BWNOT=26, MUL=27, DIV=28, REM=29, ADD=30, SUB=31, LSH=32, RSH=33, USH=34, - LT=35, LTE=36, GT=37, GTE=38, EQ=39, EQR=40, NE=41, NER=42, BWAND=43, - XOR=44, BWOR=45, BOOLAND=46, BOOLOR=47, COND=48, COLON=49, REF=50, ARROW=51, - FIND=52, MATCH=53, INCR=54, DECR=55, ASSIGN=56, AADD=57, ASUB=58, AMUL=59, - ADIV=60, AREM=61, AAND=62, AXOR=63, AOR=64, ALSH=65, ARSH=66, AUSH=67, - OCTAL=68, HEX=69, INTEGER=70, DECIMAL=71, STRING=72, REGEX=73, TRUE=74, - FALSE=75, NULL=76, TYPE=77, ID=78, DOTINTEGER=79, DOTID=80; + COMMA=10, SEMICOLON=11, IF=12, IN=13, ELSE=14, WHILE=15, DO=16, FOR=17, + CONTINUE=18, BREAK=19, RETURN=20, NEW=21, TRY=22, CATCH=23, THROW=24, + THIS=25, INSTANCEOF=26, BOOLNOT=27, BWNOT=28, MUL=29, DIV=30, REM=31, + ADD=32, SUB=33, LSH=34, RSH=35, USH=36, LT=37, LTE=38, GT=39, GTE=40, + EQ=41, EQR=42, NE=43, NER=44, BWAND=45, XOR=46, BWOR=47, BOOLAND=48, BOOLOR=49, + COND=50, COLON=51, REF=52, ARROW=53, FIND=54, MATCH=55, INCR=56, DECR=57, + ASSIGN=58, AADD=59, ASUB=60, AMUL=61, ADIV=62, AREM=63, AAND=64, AXOR=65, + AOR=66, ALSH=67, ARSH=68, AUSH=69, OCTAL=70, HEX=71, INTEGER=72, DECIMAL=73, + STRING=74, REGEX=75, TRUE=76, FALSE=77, NULL=78, TYPE=79, ID=80, DOTINTEGER=81, + DOTID=82; public static final int RULE_source = 0, RULE_function = 1, RULE_parameters = 2, RULE_statement = 3, 
RULE_trailer = 4, RULE_block = 5, RULE_empty = 6, RULE_initializer = 7,
@@ -48,25 +49,25 @@ class PainlessParser extends Parser {
private static final String[] _LITERAL_NAMES = {
null, null, null, "'{'", "'}'", "'['", "']'", "'('", "')'", "'.'", "','",
- "';'", "'if'", "'else'", "'while'", "'do'", "'for'", "'continue'", "'break'",
- "'return'", "'new'", "'try'", "'catch'", "'throw'", "'this'", "'!'", "'~'",
- "'*'", "'/'", "'%'", "'+'", "'-'", "'<<'", "'>>'", "'>>>'", "'<'", "'<='",
- "'>'", "'>='", "'=='", "'==='", "'!='", "'!=='", "'&'", "'^'", "'|'",
- "'&&'", "'||'", "'?'", "':'", "'::'", "'->'", "'=~'", "'==~'", "'++'",
- "'--'", "'='", "'+='", "'-='", "'*='", "'/='", "'%='", "'&='", "'^='",
- "'|='", "'<<='", "'>>='", "'>>>='", null, null, null, null, null, null,
- "'true'", "'false'", "'null'"
+ "';'", "'if'", "'in'", "'else'", "'while'", "'do'", "'for'", "'continue'",
+ "'break'", "'return'", "'new'", "'try'", "'catch'", "'throw'", "'this'",
+ "'instanceof'", "'!'", "'~'", "'*'", "'/'", "'%'", "'+'", "'-'", "'<<'",
+ "'>>'", "'>>>'", "'<'", "'<='", "'>'", "'>='", "'=='", "'==='", "'!='",
+ "'!=='", "'&'", "'^'", "'|'", "'&&'", "'||'", "'?'", "':'", "'::'", "'->'",
+ "'=~'", "'==~'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", "'/='",
+ "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", null, null,
+ null, null, null, null, "'true'", "'false'", "'null'"
};
private static final String[] _SYMBOLIC_NAMES = {
null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP",
- "DOT", "COMMA", "SEMICOLON", "IF", "ELSE", "WHILE", "DO", "FOR", "CONTINUE",
- "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "THIS", "BOOLNOT",
- "BWNOT", "MUL", "DIV", "REM", "ADD", "SUB", "LSH", "RSH", "USH", "LT",
- "LTE", "GT", "GTE", "EQ", "EQR", "NE", "NER", "BWAND", "XOR", "BWOR",
- "BOOLAND", "BOOLOR", "COND", "COLON", "REF", "ARROW", "FIND", "MATCH",
- "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", "ADIV", "AREM", "AAND",
- "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL", "HEX", "INTEGER", "DECIMAL",
- "STRING", "REGEX", "TRUE", "FALSE", "NULL", "TYPE", "ID", "DOTINTEGER",
+ "DOT", "COMMA", "SEMICOLON", "IF", "IN", "ELSE", "WHILE", "DO", "FOR",
+ "CONTINUE", "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "THIS",
+ "INSTANCEOF", "BOOLNOT", "BWNOT", "MUL", "DIV", "REM", "ADD", "SUB", "LSH",
+ "RSH", "USH", "LT", "LTE", "GT", "GTE", "EQ", "EQR", "NE", "NER", "BWAND",
+ "XOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "REF", "ARROW", "FIND",
+ "MATCH", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", "ADIV", "AREM",
+ "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL", "HEX", "INTEGER",
+ "DECIMAL", "STRING", "REGEX", "TRUE", "FALSE", "NULL", "TYPE", "ID", "DOTINTEGER",
"DOTID"
};
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
@@ -364,33 +365,6 @@ class PainlessParser extends Parser {
else return visitor.visitChildren(this);
}
}
- public static class ThrowContext extends StatementContext {
- public TerminalNode THROW() { return getToken(PainlessParser.THROW, 0); }
- public ExpressionContext expression() {
- return getRuleContext(ExpressionContext.class,0);
- }
- public DelimiterContext delimiter() {
- return getRuleContext(DelimiterContext.class,0);
- }
- public ThrowContext(StatementContext ctx) { copyFrom(ctx); }
- @Override
- public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor<? extends T>)visitor).visitThrow(this);
- else return visitor.visitChildren(this);
- }
- }
- public static class ContinueContext extends StatementContext {
- public TerminalNode CONTINUE() { return getToken(PainlessParser.CONTINUE, 0); }
- public DelimiterContext delimiter() {
- return getRuleContext(DelimiterContext.class,0);
- }
- public ContinueContext(StatementContext ctx) { copyFrom(ctx); }
- @Override
- public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor<? extends T>)visitor).visitContinue(this);
- else return visitor.visitChildren(this);
- }
- }
public static class ForContext extends StatementContext {
public TerminalNode FOR() { return getToken(PainlessParser.FOR, 0); }
public TerminalNode LP() { return getToken(PainlessParser.LP, 0); }
@@ -421,38 +395,6 @@
else return visitor.visitChildren(this);
}
}
- public static class TryContext extends StatementContext {
- public TerminalNode TRY() { return getToken(PainlessParser.TRY, 0); }
- public BlockContext block() {
- return getRuleContext(BlockContext.class,0);
- }
- public List<TrapContext> trap() {
- return getRuleContexts(TrapContext.class);
- }
- public TrapContext trap(int i) {
- return getRuleContext(TrapContext.class,i);
- }
- public TryContext(StatementContext ctx) { copyFrom(ctx); }
- @Override
- public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor<? extends T>)visitor).visitTry(this);
- else return visitor.visitChildren(this);
- }
- }
- public static class ExprContext extends StatementContext {
- public ExpressionContext expression() {
- return getRuleContext(ExpressionContext.class,0);
- }
- public DelimiterContext delimiter() {
- return getRuleContext(DelimiterContext.class,0);
- }
- public ExprContext(StatementContext ctx) { copyFrom(ctx); }
- @Override
- public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor<? extends T>)visitor).visitExpr(this);
- else return visitor.visitChildren(this);
- }
- }
public static class DoContext extends StatementContext {
public TerminalNode DO() { return getToken(PainlessParser.DO, 0); }
public BlockContext block() {
@@ -494,24 +436,22 @@
else return visitor.visitChildren(this);
}
}
- public static class IfContext extends StatementContext {
- public TerminalNode IF() { return getToken(PainlessParser.IF, 0); }
+ public static class IneachContext extends StatementContext {
+ public TerminalNode FOR() { return getToken(PainlessParser.FOR, 0); }
public TerminalNode LP() { return getToken(PainlessParser.LP, 0); }
+ public TerminalNode ID() { return getToken(PainlessParser.ID, 0); }
+ public TerminalNode IN() { return getToken(PainlessParser.IN, 0); }
public ExpressionContext expression() {
return getRuleContext(ExpressionContext.class,0);
}
public TerminalNode RP() { return getToken(PainlessParser.RP, 0); }
- public List<TrailerContext> trailer() {
- return getRuleContexts(TrailerContext.class);
+ public TrailerContext trailer() {
+ return getRuleContext(TrailerContext.class,0);
}
- public TrailerContext trailer(int i) {
- return getRuleContext(TrailerContext.class,i);
- }
- public TerminalNode ELSE() { return getToken(PainlessParser.ELSE, 0); }
- public IfContext(StatementContext ctx) { copyFrom(ctx); }
+ public IneachContext(StatementContext ctx) { copyFrom(ctx); }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
- if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor<? extends T>)visitor).visitIf(this);
+ if ( visitor instanceof
PainlessParserVisitor ) return ((PainlessParserVisitor<? extends T>)visitor).visitIneach(this);
else return visitor.visitChildren(this);
}
}
@@ -537,6 +477,86 @@
else return visitor.visitChildren(this);
}
}
+ public static class ThrowContext extends StatementContext {
+ public TerminalNode THROW() { return getToken(PainlessParser.THROW, 0); }
+ public ExpressionContext expression() {
+ return getRuleContext(ExpressionContext.class,0);
+ }
+ public DelimiterContext delimiter() {
+ return getRuleContext(DelimiterContext.class,0);
+ }
+ public ThrowContext(StatementContext ctx) { copyFrom(ctx); }
+ @Override
+ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
+ if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor<? extends T>)visitor).visitThrow(this);
+ else return visitor.visitChildren(this);
+ }
+ }
+ public static class ContinueContext extends StatementContext {
+ public TerminalNode CONTINUE() { return getToken(PainlessParser.CONTINUE, 0); }
+ public DelimiterContext delimiter() {
+ return getRuleContext(DelimiterContext.class,0);
+ }
+ public ContinueContext(StatementContext ctx) { copyFrom(ctx); }
+ @Override
+ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
+ if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor<? extends T>)visitor).visitContinue(this);
+ else return visitor.visitChildren(this);
+ }
+ }
+ public static class TryContext extends StatementContext {
+ public TerminalNode TRY() { return getToken(PainlessParser.TRY, 0); }
+ public BlockContext block() {
+ return getRuleContext(BlockContext.class,0);
+ }
+ public List<TrapContext> trap() {
+ return getRuleContexts(TrapContext.class);
+ }
+ public TrapContext trap(int i) {
+ return getRuleContext(TrapContext.class,i);
+ }
+ public TryContext(StatementContext ctx) { copyFrom(ctx); }
+ @Override
+ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
+ if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor<? extends T>)visitor).visitTry(this);
+ else return visitor.visitChildren(this);
+ }
+ }
+ public static class ExprContext extends StatementContext {
+ public ExpressionContext expression() {
+ return getRuleContext(ExpressionContext.class,0);
+ }
+ public DelimiterContext delimiter() {
+ return getRuleContext(DelimiterContext.class,0);
+ }
+ public ExprContext(StatementContext ctx) { copyFrom(ctx); }
+ @Override
+ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
+ if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor<? extends T>)visitor).visitExpr(this);
+ else return visitor.visitChildren(this);
+ }
+ }
+ public static class IfContext extends StatementContext {
+ public TerminalNode IF() { return getToken(PainlessParser.IF, 0); }
+ public TerminalNode LP() { return getToken(PainlessParser.LP, 0); }
+ public ExpressionContext expression() {
+ return getRuleContext(ExpressionContext.class,0);
+ }
+ public TerminalNode RP() { return getToken(PainlessParser.RP, 0); }
+ public List<TrailerContext> trailer() {
+ return getRuleContexts(TrailerContext.class);
+ }
+ public TrailerContext trailer(int i) {
+ return getRuleContext(TrailerContext.class,i);
+ }
+ public TerminalNode ELSE() { return getToken(PainlessParser.ELSE, 0); }
+ public IfContext(StatementContext ctx) { copyFrom(ctx); }
+ @Override
+ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
+ if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor<? extends T>)visitor).visitIf(this);
+ else return visitor.visitChildren(this);
+ }
+ }
public static class ReturnContext extends StatementContext {
public TerminalNode RETURN() { return
getToken(PainlessParser.RETURN, 0); } public ExpressionContext expression() { @@ -558,7 +578,7 @@ class PainlessParser extends Parser { enterRule(_localctx, 6, RULE_statement); try { int _alt; - setState(181); + setState(189); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: _localctx = new IfContext(_localctx); @@ -723,56 +743,76 @@ class PainlessParser extends Parser { } break; case 6: - _localctx = new DeclContext(_localctx); + _localctx = new IneachContext(_localctx); enterOuterAlt(_localctx, 6); { setState(156); - declaration(); + match(FOR); setState(157); - delimiter(); + match(LP); + setState(158); + match(ID); + setState(159); + match(IN); + setState(160); + expression(0); + setState(161); + match(RP); + setState(162); + trailer(); } break; case 7: - _localctx = new ContinueContext(_localctx); + _localctx = new DeclContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(159); - match(CONTINUE); - setState(160); - delimiter(); - } - break; - case 8: - _localctx = new BreakContext(_localctx); - enterOuterAlt(_localctx, 8); - { - setState(161); - match(BREAK); - setState(162); - delimiter(); - } - break; - case 9: - _localctx = new ReturnContext(_localctx); - enterOuterAlt(_localctx, 9); - { - setState(163); - match(RETURN); setState(164); - expression(0); + declaration(); setState(165); delimiter(); } break; - case 10: - _localctx = new TryContext(_localctx); - enterOuterAlt(_localctx, 10); + case 8: + _localctx = new ContinueContext(_localctx); + enterOuterAlt(_localctx, 8); { setState(167); - match(TRY); + match(CONTINUE); setState(168); + delimiter(); + } + break; + case 9: + _localctx = new BreakContext(_localctx); + enterOuterAlt(_localctx, 9); + { + setState(169); + match(BREAK); + setState(170); + delimiter(); + } + break; + case 10: + _localctx = new ReturnContext(_localctx); + enterOuterAlt(_localctx, 10); + { + setState(171); + match(RETURN); + setState(172); + expression(0); + setState(173); + delimiter(); + } + break; + case 11: + _localctx = new TryContext(_localctx); + enterOuterAlt(_localctx, 11); + { + setState(175); + match(TRY); + setState(176); block(); - setState(170); + setState(178); _errHandler.sync(this); _alt = 1; do { @@ -780,7 +820,7 @@ class PainlessParser extends Parser { case 1: { { - setState(169); + setState(177); trap(); } } @@ -788,31 +828,31 @@ class PainlessParser extends Parser { default: throw new NoViableAltException(this); } - setState(172); + setState(180); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,10,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); } break; - case 11: + case 12: _localctx = new ThrowContext(_localctx); - enterOuterAlt(_localctx, 11); + enterOuterAlt(_localctx, 12); { - setState(174); + setState(182); match(THROW); - setState(175); + setState(183); expression(0); - setState(176); + setState(184); delimiter(); } break; - case 12: + case 13: _localctx = new ExprContext(_localctx); - enterOuterAlt(_localctx, 12); + enterOuterAlt(_localctx, 13); { - setState(178); + setState(186); expression(0); - setState(179); + setState(187); delimiter(); } break; @@ -851,19 +891,19 @@ class PainlessParser extends Parser { TrailerContext _localctx = new TrailerContext(_ctx, getState()); enterRule(_localctx, 8, RULE_trailer); try { - setState(185); + setState(193); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(183); + setState(191); block(); } break; case 2: enterOuterAlt(_localctx, 2); { 
- setState(184); + setState(192); statement(); } break; @@ -907,25 +947,25 @@ class PainlessParser extends Parser { int _alt; enterOuterAlt(_localctx, 1); { - setState(187); + setState(195); match(LBRACK); - setState(191); + setState(199); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,13,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(188); + setState(196); statement(); } } } - setState(193); + setState(201); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,13,_ctx); } - setState(194); + setState(202); match(RBRACK); } } @@ -959,7 +999,7 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(196); + setState(204); match(SEMICOLON); } } @@ -996,19 +1036,19 @@ class PainlessParser extends Parser { InitializerContext _localctx = new InitializerContext(_ctx, getState()); enterRule(_localctx, 14, RULE_initializer); try { - setState(200); + setState(208); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(198); + setState(206); declaration(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(199); + setState(207); expression(0); } break; @@ -1046,7 +1086,7 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(202); + setState(210); expression(0); } } @@ -1093,23 +1133,23 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(204); + setState(212); decltype(); - setState(205); + setState(213); declvar(); - setState(210); + setState(218); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(206); + setState(214); match(COMMA); - setState(207); + setState(215); declvar(); } } - setState(212); + setState(220); _errHandler.sync(this); _la = _input.LA(1); } @@ -1150,27 +1190,29 @@ class PainlessParser extends Parser { public final DecltypeContext decltype() throws RecognitionException { DecltypeContext _localctx = new DecltypeContext(_ctx, getState()); enterRule(_localctx, 20, RULE_decltype); - int _la; try { + int _alt; enterOuterAlt(_localctx, 1); { - setState(213); + setState(221); match(TYPE); - setState(218); + setState(226); _errHandler.sync(this); - _la = _input.LA(1); - while (_la==LBRACE) { - { - { - setState(214); - match(LBRACE); - setState(215); - match(RBRACE); + _alt = getInterpreter().adaptivePredict(_input,16,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + { + { + setState(222); + match(LBRACE); + setState(223); + match(RBRACE); + } + } } - } - setState(220); + setState(228); _errHandler.sync(this); - _la = _input.LA(1); + _alt = getInterpreter().adaptivePredict(_input,16,_ctx); } } } @@ -1209,15 +1251,15 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(221); + setState(229); match(ID); - setState(224); + setState(232); _la = _input.LA(1); if (_la==ASSIGN) { { - setState(222); + setState(230); match(ASSIGN); - setState(223); + setState(231); expression(0); } } @@ -1261,17 +1303,17 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(226); + setState(234); match(CATCH); - setState(227); + setState(235); match(LP); - setState(228); + setState(236); match(TYPE); - setState(229); + setState(237); match(ID); - setState(230); + setState(238); match(RP); - setState(231); + setState(239); block(); } } @@ -1307,7 +1349,7 @@ class PainlessParser extends 
Parser {
try {
enterOuterAlt(_localctx, 1);
{
- setState(233);
+ setState(241);
_la = _input.LA(1);
if ( !(_la==EOF || _la==SEMICOLON) ) {
_errHandler.recoverInline(this);
@@ -1461,6 +1503,21 @@ class PainlessParser extends Parser {
else return visitor.visitChildren(this);
}
}
+ public static class InstanceofContext extends ExpressionContext {
+ public ExpressionContext expression() {
+ return getRuleContext(ExpressionContext.class,0);
+ }
+ public TerminalNode INSTANCEOF() { return getToken(PainlessParser.INSTANCEOF, 0); }
+ public DecltypeContext decltype() {
+ return getRuleContext(DecltypeContext.class,0);
+ }
+ public InstanceofContext(ExpressionContext ctx) { copyFrom(ctx); }
+ @Override
+ public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
+ if ( visitor instanceof PainlessParserVisitor ) return ((PainlessParserVisitor<? extends T>)visitor).visitInstanceof(this);
+ else return visitor.visitChildren(this);
+ }
+ }
public final ExpressionContext expression() throws RecognitionException {
return expression(0);
@@ -1478,7 +1535,7 @@ class PainlessParser extends Parser {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(244);
+ setState(252);
switch ( getInterpreter().adaptivePredict(_input,18,_ctx) ) {
case 1:
{
@@ -1486,16 +1543,16 @@ class PainlessParser extends Parser {
_ctx = _localctx;
_prevctx = _localctx;
- setState(236);
+ setState(244);
chain(true);
- setState(237);
+ setState(245);
_la = _input.LA(1);
- if ( !(((((_la - 56)) & ~0x3f) == 0 && ((1L << (_la - 56)) & ((1L << (ASSIGN - 56)) | (1L << (AADD - 56)) | (1L << (ASUB - 56)) | (1L << (AMUL - 56)) | (1L << (ADIV - 56)) | (1L << (AREM - 56)) | (1L << (AAND - 56)) | (1L << (AXOR - 56)) | (1L << (AOR - 56)) | (1L << (ALSH - 56)) | (1L << (ARSH - 56)) | (1L << (AUSH - 56)))) != 0)) ) {
+ if ( !(((((_la - 58)) & ~0x3f) == 0 && ((1L << (_la - 58)) & ((1L << (ASSIGN - 58)) | (1L << (AADD - 58)) | (1L << (ASUB - 58)) | (1L << (AMUL - 58)) | (1L << (ADIV - 58)) | (1L << (AREM - 58)) | (1L << (AAND - 58)) | (1L << (AXOR - 58)) | (1L << (AOR - 58)) | (1L << (ALSH - 58)) | (1L << (ARSH - 58)) | (1L << (AUSH - 58)))) != 0)) ) {
_errHandler.recoverInline(this);
}
else {
consume();
}
- setState(238);
+ setState(246);
expression(1);
((AssignmentContext)_localctx).s = false;
}
@@ -1505,14 +1562,14 @@ class PainlessParser extends Parser {
_localctx = new SingleContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(241);
+ setState(249);
((SingleContext)_localctx).u = unary(false);
((SingleContext)_localctx).s = ((SingleContext)_localctx).u.s;
}
break;
}
_ctx.stop = _input.LT(-1);
- setState(310);
+ setState(323);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,20,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
@@ -1520,23 +1577,23 @@
if ( _parseListeners!=null ) triggerExitRuleEvent();
_prevctx = _localctx;
{
- setState(308);
+ setState(321);
switch ( getInterpreter().adaptivePredict(_input,19,_ctx) ) {
case 1:
{
_localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState));
pushNewRecursionContext(_localctx, _startState, RULE_expression);
- setState(246);
- if (!(precpred(_ctx, 13))) throw new FailedPredicateException(this, "precpred(_ctx, 13)");
- setState(247);
+ setState(254);
+ if (!(precpred(_ctx, 14))) throw new FailedPredicateException(this, "precpred(_ctx, 14)");
+ setState(255);
_la = _input.LA(1);
if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << MUL) | (1L << DIV) | (1L << REM))) != 0)) ) {
_errHandler.recoverInline(this); } else { consume(); } - setState(248); - expression(14); + setState(256); + expression(15); ((BinaryContext)_localctx).s = false; } break; @@ -1544,17 +1601,17 @@ class PainlessParser extends Parser { { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(251); - if (!(precpred(_ctx, 12))) throw new FailedPredicateException(this, "precpred(_ctx, 12)"); - setState(252); + setState(259); + if (!(precpred(_ctx, 13))) throw new FailedPredicateException(this, "precpred(_ctx, 13)"); + setState(260); _la = _input.LA(1); if ( !(_la==ADD || _la==SUB) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(253); - expression(13); + setState(261); + expression(14); ((BinaryContext)_localctx).s = false; } break; @@ -1562,17 +1619,17 @@ class PainlessParser extends Parser { { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(256); - if (!(precpred(_ctx, 11))) throw new FailedPredicateException(this, "precpred(_ctx, 11)"); - setState(257); + setState(264); + if (!(precpred(_ctx, 12))) throw new FailedPredicateException(this, "precpred(_ctx, 12)"); + setState(265); _la = _input.LA(1); if ( !(_la==FIND || _la==MATCH) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(258); - expression(12); + setState(266); + expression(13); ((BinaryContext)_localctx).s = false; } break; @@ -1580,17 +1637,17 @@ class PainlessParser extends Parser { { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(261); - if (!(precpred(_ctx, 10))) throw new FailedPredicateException(this, "precpred(_ctx, 10)"); - setState(262); + setState(269); + if (!(precpred(_ctx, 11))) throw new FailedPredicateException(this, "precpred(_ctx, 11)"); + setState(270); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LSH) | (1L << RSH) | (1L << USH))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(263); - expression(11); + setState(271); + expression(12); ((BinaryContext)_localctx).s = false; } break; @@ -1598,17 +1655,17 @@ class PainlessParser extends Parser { { _localctx = new CompContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(266); - if (!(precpred(_ctx, 9))) throw new FailedPredicateException(this, "precpred(_ctx, 9)"); - setState(267); + setState(274); + if (!(precpred(_ctx, 10))) throw new FailedPredicateException(this, "precpred(_ctx, 10)"); + setState(275); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LT) | (1L << LTE) | (1L << GT) | (1L << GTE))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(268); - expression(10); + setState(276); + expression(11); ((CompContext)_localctx).s = false; } break; @@ -1616,16 +1673,16 @@ class PainlessParser extends Parser { { _localctx = new CompContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(271); + setState(279); if (!(precpred(_ctx, 8))) throw new FailedPredicateException(this, "precpred(_ctx, 8)"); - setState(272); + setState(280); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << EQ) | (1L << EQR) | (1L << NE) | (1L << 
NER))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(273); + setState(281); expression(9); ((CompContext)_localctx).s = false; } @@ -1634,11 +1691,11 @@ class PainlessParser extends Parser { { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(276); + setState(284); if (!(precpred(_ctx, 7))) throw new FailedPredicateException(this, "precpred(_ctx, 7)"); - setState(277); + setState(285); match(BWAND); - setState(278); + setState(286); expression(8); ((BinaryContext)_localctx).s = false; } @@ -1647,11 +1704,11 @@ class PainlessParser extends Parser { { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(281); + setState(289); if (!(precpred(_ctx, 6))) throw new FailedPredicateException(this, "precpred(_ctx, 6)"); - setState(282); + setState(290); match(XOR); - setState(283); + setState(291); expression(7); ((BinaryContext)_localctx).s = false; } @@ -1660,11 +1717,11 @@ class PainlessParser extends Parser { { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(286); + setState(294); if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(287); + setState(295); match(BWOR); - setState(288); + setState(296); expression(6); ((BinaryContext)_localctx).s = false; } @@ -1673,11 +1730,11 @@ class PainlessParser extends Parser { { _localctx = new BoolContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(291); + setState(299); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(292); + setState(300); match(BOOLAND); - setState(293); + setState(301); expression(5); ((BoolContext)_localctx).s = false; } @@ -1686,11 +1743,11 @@ class PainlessParser extends Parser { { _localctx = new BoolContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(296); + setState(304); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(297); + setState(305); match(BOOLOR); - setState(298); + setState(306); expression(4); ((BoolContext)_localctx).s = false; } @@ -1699,23 +1756,36 @@ class PainlessParser extends Parser { { _localctx = new ConditionalContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(301); + setState(309); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(302); + setState(310); match(COND); - setState(303); + setState(311); ((ConditionalContext)_localctx).e0 = expression(0); - setState(304); + setState(312); match(COLON); - setState(305); + setState(313); ((ConditionalContext)_localctx).e1 = expression(2); ((ConditionalContext)_localctx).s = ((ConditionalContext)_localctx).e0.s && ((ConditionalContext)_localctx).e1.s; } break; + case 13: + { + _localctx = new InstanceofContext(new ExpressionContext(_parentctx, _parentState)); + pushNewRecursionContext(_localctx, _startState, RULE_expression); + setState(316); + if (!(precpred(_ctx, 9))) throw new FailedPredicateException(this, "precpred(_ctx, 9)"); + setState(317); + match(INSTANCEOF); + 
setState(318); + decltype(); + ((InstanceofContext)_localctx).s = false; + } + break; } } } - setState(312); + setState(325); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); } @@ -1884,22 +1954,22 @@ class PainlessParser extends Parser { enterRule(_localctx, 30, RULE_unary); int _la; try { - setState(350); + setState(363); switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { case 1: _localctx = new PreContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(313); + setState(326); if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(314); + setState(327); _la = _input.LA(1); if ( !(_la==INCR || _la==DECR) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(315); + setState(328); chain(true); } break; @@ -1907,11 +1977,11 @@ class PainlessParser extends Parser { _localctx = new PostContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(316); + setState(329); if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(317); + setState(330); chain(true); - setState(318); + setState(331); _la = _input.LA(1); if ( !(_la==INCR || _la==DECR) ) { _errHandler.recoverInline(this); @@ -1924,9 +1994,9 @@ class PainlessParser extends Parser { _localctx = new ReadContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(320); + setState(333); if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(321); + setState(334); chain(false); } break; @@ -1934,11 +2004,11 @@ class PainlessParser extends Parser { _localctx = new NumericContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(322); + setState(335); if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(323); + setState(336); _la = _input.LA(1); - if ( !(((((_la - 68)) & ~0x3f) == 0 && ((1L << (_la - 68)) & ((1L << (OCTAL - 68)) | (1L << (HEX - 68)) | (1L << (INTEGER - 68)) | (1L << (DECIMAL - 68)))) != 0)) ) { + if ( !(((((_la - 70)) & ~0x3f) == 0 && ((1L << (_la - 70)) & ((1L << (OCTAL - 70)) | (1L << (HEX - 70)) | (1L << (INTEGER - 70)) | (1L << (DECIMAL - 70)))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); @@ -1950,9 +2020,9 @@ class PainlessParser extends Parser { _localctx = new TrueContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(325); + setState(338); if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(326); + setState(339); match(TRUE); ((TrueContext)_localctx).s = false; } @@ -1961,9 +2031,9 @@ class PainlessParser extends Parser { _localctx = new FalseContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(328); + setState(341); if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(329); + setState(342); match(FALSE); ((FalseContext)_localctx).s = false; } @@ -1972,9 +2042,9 @@ class PainlessParser extends Parser { _localctx = new NullContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(331); + setState(344); if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(332); + setState(345); match(NULL); ((NullContext)_localctx).s = false; } @@ -1983,9 +2053,9 @@ class PainlessParser extends Parser { _localctx = new ListinitContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(334); + setState(347); if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(335); + setState(348); listinitializer(); ((ListinitContext)_localctx).s = false; } @@ -1994,9 +2064,9 @@ class 
PainlessParser extends Parser { _localctx = new MapinitContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(338); + setState(351); if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(339); + setState(352); mapinitializer(); ((MapinitContext)_localctx).s = false; } @@ -2005,16 +2075,16 @@ class PainlessParser extends Parser { _localctx = new OperatorContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(342); + setState(355); if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(343); + setState(356); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(344); + setState(357); unary(false); } break; @@ -2022,13 +2092,13 @@ class PainlessParser extends Parser { _localctx = new CastContext(_localctx); enterOuterAlt(_localctx, 11); { - setState(345); + setState(358); match(LP); - setState(346); + setState(359); decltype(); - setState(347); + setState(360); match(RP); - setState(348); + setState(361); unary(_localctx.c); } break; @@ -2115,27 +2185,27 @@ class PainlessParser extends Parser { enterRule(_localctx, 32, RULE_chain); try { int _alt; - setState(368); + setState(381); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: _localctx = new DynamicContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(352); + setState(365); ((DynamicContext)_localctx).p = primary(_localctx.c); - setState(356); + setState(369); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(353); + setState(366); secondary(((DynamicContext)_localctx).p.s); } } } - setState(358); + setState(371); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); } @@ -2145,23 +2215,23 @@ class PainlessParser extends Parser { _localctx = new StaticContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(359); + setState(372); decltype(); - setState(360); + setState(373); dot(); - setState(364); + setState(377); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,23,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(361); + setState(374); secondary(true); } } } - setState(366); + setState(379); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,23,_ctx); } @@ -2171,7 +2241,7 @@ class PainlessParser extends Parser { _localctx = new NewarrayContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(367); + setState(380); arrayinitializer(); } break; @@ -2289,19 +2359,19 @@ class PainlessParser extends Parser { PrimaryContext _localctx = new PrimaryContext(_ctx, getState(), c); enterRule(_localctx, 34, RULE_primary); try { - setState(389); + setState(402); switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { case 1: _localctx = new ExprprecContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(370); + setState(383); if (!( !_localctx.c )) throw new FailedPredicateException(this, " !$c "); - setState(371); + setState(384); match(LP); - setState(372); + setState(385); ((ExprprecContext)_localctx).e = expression(0); - setState(373); + setState(386); match(RP); ((ExprprecContext)_localctx).s = ((ExprprecContext)_localctx).e.s; } @@ -2310,13 +2380,13 @@ class PainlessParser extends Parser { _localctx = 
new ChainprecContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(376); + setState(389); if (!( _localctx.c )) throw new FailedPredicateException(this, " $c "); - setState(377); + setState(390); match(LP); - setState(378); + setState(391); unary(true); - setState(379); + setState(392); match(RP); } break; @@ -2324,7 +2394,7 @@ class PainlessParser extends Parser { _localctx = new StringContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(381); + setState(394); match(STRING); } break; @@ -2332,7 +2402,7 @@ class PainlessParser extends Parser { _localctx = new RegexContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(382); + setState(395); match(REGEX); } break; @@ -2340,7 +2410,7 @@ class PainlessParser extends Parser { _localctx = new VariableContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(383); + setState(396); match(ID); } break; @@ -2348,9 +2418,9 @@ class PainlessParser extends Parser { _localctx = new CalllocalContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(384); + setState(397); match(ID); - setState(385); + setState(398); arguments(); } break; @@ -2358,11 +2428,11 @@ class PainlessParser extends Parser { _localctx = new NewobjectContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(386); + setState(399); match(NEW); - setState(387); + setState(400); match(TYPE); - setState(388); + setState(401); arguments(); } break; @@ -2404,23 +2474,23 @@ class PainlessParser extends Parser { SecondaryContext _localctx = new SecondaryContext(_ctx, getState(), s); enterRule(_localctx, 36, RULE_secondary); try { - setState(395); + setState(408); switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(391); + setState(404); if (!( _localctx.s )) throw new FailedPredicateException(this, " $s "); - setState(392); + setState(405); dot(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(393); + setState(406); if (!( _localctx.s )) throw new FailedPredicateException(this, " $s "); - setState(394); + setState(407); brace(); } break; @@ -2478,17 +2548,17 @@ class PainlessParser extends Parser { enterRule(_localctx, 38, RULE_dot); int _la; try { - setState(402); + setState(415); switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { case 1: _localctx = new CallinvokeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(397); + setState(410); match(DOT); - setState(398); + setState(411); match(DOTID); - setState(399); + setState(412); arguments(); } break; @@ -2496,9 +2566,9 @@ class PainlessParser extends Parser { _localctx = new FieldaccessContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(400); + setState(413); match(DOT); - setState(401); + setState(414); _la = _input.LA(1); if ( !(_la==DOTINTEGER || _la==DOTID) ) { _errHandler.recoverInline(this); @@ -2552,11 +2622,11 @@ class PainlessParser extends Parser { _localctx = new BraceaccessContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(404); + setState(417); match(LBRACE); - setState(405); + setState(418); expression(0); - setState(406); + setState(419); match(RBRACE); } } @@ -2603,34 +2673,34 @@ class PainlessParser extends Parser { enterOuterAlt(_localctx, 1); { { - setState(408); + setState(421); match(LP); - setState(417); + setState(430); switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: { - setState(409); + setState(422); argument(); - setState(414); + setState(427); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(410); + 
setState(423); match(COMMA); - setState(411); + setState(424); argument(); } } - setState(416); + setState(429); _errHandler.sync(this); _la = _input.LA(1); } } break; } - setState(419); + setState(432); match(RP); } } @@ -2671,26 +2741,26 @@ class PainlessParser extends Parser { ArgumentContext _localctx = new ArgumentContext(_ctx, getState()); enterRule(_localctx, 44, RULE_argument); try { - setState(424); + setState(437); switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(421); + setState(434); expression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(422); + setState(435); lambda(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(423); + setState(436); funcref(); } break; @@ -2745,64 +2815,64 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(439); + setState(452); switch (_input.LA(1)) { case TYPE: case ID: { - setState(426); + setState(439); lamtype(); } break; case LP: { - setState(427); + setState(440); match(LP); - setState(436); + setState(449); _la = _input.LA(1); if (_la==TYPE || _la==ID) { { - setState(428); + setState(441); lamtype(); - setState(433); + setState(446); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(429); + setState(442); match(COMMA); - setState(430); + setState(443); lamtype(); } } - setState(435); + setState(448); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(438); + setState(451); match(RP); } break; default: throw new NoViableAltException(this); } - setState(441); + setState(454); match(ARROW); - setState(444); + setState(457); switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) { case 1: { - setState(442); + setState(455); block(); } break; case 2: { - setState(443); + setState(456); expression(0); } break; @@ -2843,16 +2913,16 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(447); + setState(460); _la = _input.LA(1); if (_la==TYPE) { { - setState(446); + setState(459); decltype(); } } - setState(449); + setState(462); match(ID); } } @@ -2895,33 +2965,33 @@ class PainlessParser extends Parser { FuncrefContext _localctx = new FuncrefContext(_ctx, getState()); enterRule(_localctx, 50, RULE_funcref); try { - setState(455); + setState(468); switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(451); + setState(464); classFuncref(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(452); + setState(465); constructorFuncref(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(453); + setState(466); capturingFuncref(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(454); + setState(467); localFuncref(); } break; @@ -2959,11 +3029,11 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(457); + setState(470); match(TYPE); - setState(458); + setState(471); match(REF); - setState(459); + setState(472); match(ID); } } @@ -3001,11 +3071,11 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(461); + setState(474); decltype(); - setState(462); + setState(475); match(REF); - setState(463); + setState(476); match(NEW); } } @@ -3043,11 +3113,11 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(465); + setState(478); match(ID); - setState(466); + setState(479); match(REF); - setState(467); + setState(480); match(ID); } } @@ -3083,11 +3153,11 @@ class 
PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(469); + setState(482); match(THIS); - setState(470); + setState(483); match(REF); - setState(471); + setState(484); match(ID); } } @@ -3178,17 +3248,17 @@ class PainlessParser extends Parser { int _la; try { int _alt; - setState(511); + setState(524); switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: _localctx = new NewstandardarrayContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(473); + setState(486); match(NEW); - setState(474); + setState(487); match(TYPE); - setState(479); + setState(492); _errHandler.sync(this); _alt = 1; do { @@ -3196,11 +3266,11 @@ class PainlessParser extends Parser { case 1: { { - setState(475); + setState(488); match(LBRACE); - setState(476); + setState(489); expression(0); - setState(477); + setState(490); match(RBRACE); } } @@ -3208,29 +3278,29 @@ class PainlessParser extends Parser { default: throw new NoViableAltException(this); } - setState(481); + setState(494); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,37,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); - setState(490); + setState(503); switch ( getInterpreter().adaptivePredict(_input,39,_ctx) ) { case 1: { - setState(483); + setState(496); dot(); - setState(487); + setState(500); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,38,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(484); + setState(497); secondary(true); } } } - setState(489); + setState(502); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,38,_ctx); } @@ -3243,51 +3313,51 @@ class PainlessParser extends Parser { _localctx = new NewinitializedarrayContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(492); - match(NEW); - setState(493); - match(TYPE); - setState(494); - match(LBRACE); - setState(495); - match(RBRACE); - setState(496); - match(LBRACK); setState(505); + match(NEW); + setState(506); + match(TYPE); + setState(507); + match(LBRACE); + setState(508); + match(RBRACE); + setState(509); + match(LBRACK); + setState(518); switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { case 1: { - setState(497); + setState(510); expression(0); - setState(502); + setState(515); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(498); + setState(511); match(COMMA); - setState(499); + setState(512); expression(0); } } - setState(504); + setState(517); _errHandler.sync(this); _la = _input.LA(1); } } break; } - setState(508); + setState(521); _la = _input.LA(1); if (_la==SEMICOLON) { { - setState(507); + setState(520); match(SEMICOLON); } } - setState(510); + setState(523); match(RBRACK); } break; @@ -3333,41 +3403,41 @@ class PainlessParser extends Parser { enterRule(_localctx, 62, RULE_listinitializer); int _la; try { - setState(526); + setState(539); switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(513); + setState(526); match(LBRACE); - setState(514); + setState(527); expression(0); - setState(519); + setState(532); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(515); + setState(528); match(COMMA); - setState(516); + setState(529); expression(0); } } - setState(521); + setState(534); _errHandler.sync(this); _la = _input.LA(1); } - setState(522); + setState(535); match(RBRACE); } break; case 2: enterOuterAlt(_localctx, 2); { 
- setState(524); + setState(537); match(LBRACE); - setState(525); + setState(538); match(RBRACE); } break; @@ -3414,43 +3484,43 @@ class PainlessParser extends Parser { enterRule(_localctx, 64, RULE_mapinitializer); int _la; try { - setState(542); + setState(555); switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(528); + setState(541); match(LBRACE); - setState(529); + setState(542); maptoken(); - setState(534); + setState(547); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(530); + setState(543); match(COMMA); - setState(531); + setState(544); maptoken(); } } - setState(536); + setState(549); _errHandler.sync(this); _la = _input.LA(1); } - setState(537); + setState(550); match(RBRACE); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(539); + setState(552); match(LBRACE); - setState(540); + setState(553); match(COLON); - setState(541); + setState(554); match(RBRACE); } break; @@ -3492,11 +3562,11 @@ class PainlessParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(544); + setState(557); expression(0); - setState(545); + setState(558); match(COLON); - setState(546); + setState(559); expression(0); } } @@ -3536,15 +3606,15 @@ class PainlessParser extends Parser { private boolean expression_sempred(ExpressionContext _localctx, int predIndex) { switch (predIndex) { case 1: - return precpred(_ctx, 13); + return precpred(_ctx, 14); case 2: - return precpred(_ctx, 12); + return precpred(_ctx, 13); case 3: - return precpred(_ctx, 11); + return precpred(_ctx, 12); case 4: - return precpred(_ctx, 10); + return precpred(_ctx, 11); case 5: - return precpred(_ctx, 9); + return precpred(_ctx, 10); case 6: return precpred(_ctx, 8); case 7: @@ -3559,13 +3629,13 @@ class PainlessParser extends Parser { return precpred(_ctx, 3); case 12: return precpred(_ctx, 2); + case 13: + return precpred(_ctx, 9); } return true; } private boolean unary_sempred(UnaryContext _localctx, int predIndex) { switch (predIndex) { - case 13: - return !_localctx.c ; case 14: return !_localctx.c ; case 15: @@ -3584,30 +3654,32 @@ class PainlessParser extends Parser { return !_localctx.c ; case 22: return !_localctx.c ; + case 23: + return !_localctx.c ; } return true; } private boolean primary_sempred(PrimaryContext _localctx, int predIndex) { switch (predIndex) { - case 23: - return !_localctx.c ; case 24: + return !_localctx.c ; + case 25: return _localctx.c ; } return true; } private boolean secondary_sempred(SecondaryContext _localctx, int predIndex) { switch (predIndex) { - case 25: - return _localctx.s ; case 26: return _localctx.s ; + case 27: + return _localctx.s ; } return true; } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3R\u0227\4\2\t\2\4"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3T\u0234\4\2\t\2\4"+ "\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+ "\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ @@ -3618,202 +3690,207 @@ class PainlessParser extends Parser { "\5\3\5\3\5\3\5\3\5\3\5\3\5\5\5z\n\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3"+ "\5\3\5\3\5\5\5\u0087\n\5\3\5\3\5\5\5\u008b\n\5\3\5\3\5\5\5\u008f\n\5\3"+ "\5\3\5\3\5\5\5\u0094\n\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3"+ - "\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\6\5\u00ad\n\5\r\5\16\5"+ - 
"\u00ae\3\5\3\5\3\5\3\5\3\5\3\5\3\5\5\5\u00b8\n\5\3\6\3\6\5\6\u00bc\n\6"+ - "\3\7\3\7\7\7\u00c0\n\7\f\7\16\7\u00c3\13\7\3\7\3\7\3\b\3\b\3\t\3\t\5\t"+ - "\u00cb\n\t\3\n\3\n\3\13\3\13\3\13\3\13\7\13\u00d3\n\13\f\13\16\13\u00d6"+ - "\13\13\3\f\3\f\3\f\7\f\u00db\n\f\f\f\16\f\u00de\13\f\3\r\3\r\3\r\5\r\u00e3"+ - "\n\r\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3\20\3\20\3\20\3\20"+ - "\3\20\3\20\3\20\3\20\3\20\5\20\u00f7\n\20\3\20\3\20\3\20\3\20\3\20\3\20"+ + "\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5"+ + "\3\5\3\5\6\5\u00b5\n\5\r\5\16\5\u00b6\3\5\3\5\3\5\3\5\3\5\3\5\3\5\5\5"+ + "\u00c0\n\5\3\6\3\6\5\6\u00c4\n\6\3\7\3\7\7\7\u00c8\n\7\f\7\16\7\u00cb"+ + "\13\7\3\7\3\7\3\b\3\b\3\t\3\t\5\t\u00d3\n\t\3\n\3\n\3\13\3\13\3\13\3\13"+ + "\7\13\u00db\n\13\f\13\16\13\u00de\13\13\3\f\3\f\3\f\7\f\u00e3\n\f\f\f"+ + "\16\f\u00e6\13\f\3\r\3\r\3\r\5\r\u00eb\n\r\3\16\3\16\3\16\3\16\3\16\3"+ + "\16\3\16\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\5\20\u00ff"+ + "\n\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20"+ "\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20"+ "\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20"+ "\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20"+ - "\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20"+ - "\7\20\u0137\n\20\f\20\16\20\u013a\13\20\3\21\3\21\3\21\3\21\3\21\3\21"+ - "\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21"+ - "\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21"+ - "\3\21\3\21\3\21\5\21\u0161\n\21\3\22\3\22\7\22\u0165\n\22\f\22\16\22\u0168"+ - "\13\22\3\22\3\22\3\22\7\22\u016d\n\22\f\22\16\22\u0170\13\22\3\22\5\22"+ - "\u0173\n\22\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23"+ - "\3\23\3\23\3\23\3\23\3\23\3\23\3\23\5\23\u0188\n\23\3\24\3\24\3\24\3\24"+ - "\5\24\u018e\n\24\3\25\3\25\3\25\3\25\3\25\5\25\u0195\n\25\3\26\3\26\3"+ - "\26\3\26\3\27\3\27\3\27\3\27\7\27\u019f\n\27\f\27\16\27\u01a2\13\27\5"+ - "\27\u01a4\n\27\3\27\3\27\3\30\3\30\3\30\5\30\u01ab\n\30\3\31\3\31\3\31"+ - "\3\31\3\31\7\31\u01b2\n\31\f\31\16\31\u01b5\13\31\5\31\u01b7\n\31\3\31"+ - "\5\31\u01ba\n\31\3\31\3\31\3\31\5\31\u01bf\n\31\3\32\5\32\u01c2\n\32\3"+ - "\32\3\32\3\33\3\33\3\33\3\33\5\33\u01ca\n\33\3\34\3\34\3\34\3\34\3\35"+ - "\3\35\3\35\3\35\3\36\3\36\3\36\3\36\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3"+ - " \3 \6 \u01e2\n \r \16 \u01e3\3 \3 \7 \u01e8\n \f \16 \u01eb\13 \5 \u01ed"+ - "\n \3 \3 \3 \3 \3 \3 \3 \3 \7 \u01f7\n \f \16 \u01fa\13 \5 \u01fc\n \3"+ - " \5 \u01ff\n \3 \5 \u0202\n \3!\3!\3!\3!\7!\u0208\n!\f!\16!\u020b\13!"+ - "\3!\3!\3!\3!\5!\u0211\n!\3\"\3\"\3\"\3\"\7\"\u0217\n\"\f\"\16\"\u021a"+ - "\13\"\3\"\3\"\3\"\3\"\3\"\5\"\u0221\n\"\3#\3#\3#\3#\3#\2\3\36$\2\4\6\b"+ - "\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BD\2\16\3\3\r"+ - "\r\3\2:E\3\2\35\37\3\2 !\3\2\66\67\3\2\"$\3\2%(\3\2),\3\289\3\2FI\4\2"+ - "\33\34 !\3\2QR\u025a\2I\3\2\2\2\4T\3\2\2\2\6Y\3\2\2\2\b\u00b7\3\2\2\2"+ - "\n\u00bb\3\2\2\2\f\u00bd\3\2\2\2\16\u00c6\3\2\2\2\20\u00ca\3\2\2\2\22"+ - "\u00cc\3\2\2\2\24\u00ce\3\2\2\2\26\u00d7\3\2\2\2\30\u00df\3\2\2\2\32\u00e4"+ - "\3\2\2\2\34\u00eb\3\2\2\2\36\u00f6\3\2\2\2 \u0160\3\2\2\2\"\u0172\3\2"+ - "\2\2$\u0187\3\2\2\2&\u018d\3\2\2\2(\u0194\3\2\2\2*\u0196\3\2\2\2,\u019a"+ - "\3\2\2\2.\u01aa\3\2\2\2\60\u01b9\3\2\2\2\62\u01c1\3\2\2\2\64\u01c9\3\2"+ - "\2\2\66\u01cb\3\2\2\28\u01cf\3\2\2\2:\u01d3\3\2\2\2<\u01d7\3\2\2\2>\u0201"+ - 
"\3\2\2\2@\u0210\3\2\2\2B\u0220\3\2\2\2D\u0222\3\2\2\2FH\5\4\3\2GF\3\2"+ - "\2\2HK\3\2\2\2IG\3\2\2\2IJ\3\2\2\2JO\3\2\2\2KI\3\2\2\2LN\5\b\5\2ML\3\2"+ - "\2\2NQ\3\2\2\2OM\3\2\2\2OP\3\2\2\2PR\3\2\2\2QO\3\2\2\2RS\7\2\2\3S\3\3"+ - "\2\2\2TU\5\26\f\2UV\7P\2\2VW\5\6\4\2WX\5\f\7\2X\5\3\2\2\2Ye\7\t\2\2Z["+ - "\5\26\f\2[b\7P\2\2\\]\7\f\2\2]^\5\26\f\2^_\7P\2\2_a\3\2\2\2`\\\3\2\2\2"+ - "ad\3\2\2\2b`\3\2\2\2bc\3\2\2\2cf\3\2\2\2db\3\2\2\2eZ\3\2\2\2ef\3\2\2\2"+ - "fg\3\2\2\2gh\7\n\2\2h\7\3\2\2\2ij\7\16\2\2jk\7\t\2\2kl\5\36\20\2lm\7\n"+ - "\2\2mq\5\n\6\2no\7\17\2\2or\5\n\6\2pr\6\5\2\2qn\3\2\2\2qp\3\2\2\2r\u00b8"+ - "\3\2\2\2st\7\20\2\2tu\7\t\2\2uv\5\36\20\2vy\7\n\2\2wz\5\n\6\2xz\5\16\b"+ - "\2yw\3\2\2\2yx\3\2\2\2z\u00b8\3\2\2\2{|\7\21\2\2|}\5\f\7\2}~\7\20\2\2"+ - "~\177\7\t\2\2\177\u0080\5\36\20\2\u0080\u0081\7\n\2\2\u0081\u0082\5\34"+ - "\17\2\u0082\u00b8\3\2\2\2\u0083\u0084\7\22\2\2\u0084\u0086\7\t\2\2\u0085"+ - "\u0087\5\20\t\2\u0086\u0085\3\2\2\2\u0086\u0087\3\2\2\2\u0087\u0088\3"+ - "\2\2\2\u0088\u008a\7\r\2\2\u0089\u008b\5\36\20\2\u008a\u0089\3\2\2\2\u008a"+ - "\u008b\3\2\2\2\u008b\u008c\3\2\2\2\u008c\u008e\7\r\2\2\u008d\u008f\5\22"+ - "\n\2\u008e\u008d\3\2\2\2\u008e\u008f\3\2\2\2\u008f\u0090\3\2\2\2\u0090"+ - "\u0093\7\n\2\2\u0091\u0094\5\n\6\2\u0092\u0094\5\16\b\2\u0093\u0091\3"+ - "\2\2\2\u0093\u0092\3\2\2\2\u0094\u00b8\3\2\2\2\u0095\u0096\7\22\2\2\u0096"+ - "\u0097\7\t\2\2\u0097\u0098\5\26\f\2\u0098\u0099\7P\2\2\u0099\u009a\7\63"+ - "\2\2\u009a\u009b\5\36\20\2\u009b\u009c\7\n\2\2\u009c\u009d\5\n\6\2\u009d"+ - "\u00b8\3\2\2\2\u009e\u009f\5\24\13\2\u009f\u00a0\5\34\17\2\u00a0\u00b8"+ - "\3\2\2\2\u00a1\u00a2\7\23\2\2\u00a2\u00b8\5\34\17\2\u00a3\u00a4\7\24\2"+ - "\2\u00a4\u00b8\5\34\17\2\u00a5\u00a6\7\25\2\2\u00a6\u00a7\5\36\20\2\u00a7"+ - "\u00a8\5\34\17\2\u00a8\u00b8\3\2\2\2\u00a9\u00aa\7\27\2\2\u00aa\u00ac"+ - "\5\f\7\2\u00ab\u00ad\5\32\16\2\u00ac\u00ab\3\2\2\2\u00ad\u00ae\3\2\2\2"+ - "\u00ae\u00ac\3\2\2\2\u00ae\u00af\3\2\2\2\u00af\u00b8\3\2\2\2\u00b0\u00b1"+ - "\7\31\2\2\u00b1\u00b2\5\36\20\2\u00b2\u00b3\5\34\17\2\u00b3\u00b8\3\2"+ - "\2\2\u00b4\u00b5\5\36\20\2\u00b5\u00b6\5\34\17\2\u00b6\u00b8\3\2\2\2\u00b7"+ - "i\3\2\2\2\u00b7s\3\2\2\2\u00b7{\3\2\2\2\u00b7\u0083\3\2\2\2\u00b7\u0095"+ - "\3\2\2\2\u00b7\u009e\3\2\2\2\u00b7\u00a1\3\2\2\2\u00b7\u00a3\3\2\2\2\u00b7"+ - "\u00a5\3\2\2\2\u00b7\u00a9\3\2\2\2\u00b7\u00b0\3\2\2\2\u00b7\u00b4\3\2"+ - "\2\2\u00b8\t\3\2\2\2\u00b9\u00bc\5\f\7\2\u00ba\u00bc\5\b\5\2\u00bb\u00b9"+ - "\3\2\2\2\u00bb\u00ba\3\2\2\2\u00bc\13\3\2\2\2\u00bd\u00c1\7\5\2\2\u00be"+ - "\u00c0\5\b\5\2\u00bf\u00be\3\2\2\2\u00c0\u00c3\3\2\2\2\u00c1\u00bf\3\2"+ - "\2\2\u00c1\u00c2\3\2\2\2\u00c2\u00c4\3\2\2\2\u00c3\u00c1\3\2\2\2\u00c4"+ - "\u00c5\7\6\2\2\u00c5\r\3\2\2\2\u00c6\u00c7\7\r\2\2\u00c7\17\3\2\2\2\u00c8"+ - "\u00cb\5\24\13\2\u00c9\u00cb\5\36\20\2\u00ca\u00c8\3\2\2\2\u00ca\u00c9"+ - "\3\2\2\2\u00cb\21\3\2\2\2\u00cc\u00cd\5\36\20\2\u00cd\23\3\2\2\2\u00ce"+ - "\u00cf\5\26\f\2\u00cf\u00d4\5\30\r\2\u00d0\u00d1\7\f\2\2\u00d1\u00d3\5"+ - "\30\r\2\u00d2\u00d0\3\2\2\2\u00d3\u00d6\3\2\2\2\u00d4\u00d2\3\2\2\2\u00d4"+ - "\u00d5\3\2\2\2\u00d5\25\3\2\2\2\u00d6\u00d4\3\2\2\2\u00d7\u00dc\7O\2\2"+ - "\u00d8\u00d9\7\7\2\2\u00d9\u00db\7\b\2\2\u00da\u00d8\3\2\2\2\u00db\u00de"+ - "\3\2\2\2\u00dc\u00da\3\2\2\2\u00dc\u00dd\3\2\2\2\u00dd\27\3\2\2\2\u00de"+ - "\u00dc\3\2\2\2\u00df\u00e2\7P\2\2\u00e0\u00e1\7:\2\2\u00e1\u00e3\5\36"+ - "\20\2\u00e2\u00e0\3\2\2\2\u00e2\u00e3\3\2\2\2\u00e3\31\3\2\2\2\u00e4\u00e5"+ - "\7\30\2\2\u00e5\u00e6\7\t\2\2\u00e6\u00e7\7O\2\2\u00e7\u00e8\7P\2\2\u00e8"+ - 
"\u00e9\7\n\2\2\u00e9\u00ea\5\f\7\2\u00ea\33\3\2\2\2\u00eb\u00ec\t\2\2"+ - "\2\u00ec\35\3\2\2\2\u00ed\u00ee\b\20\1\2\u00ee\u00ef\5\"\22\2\u00ef\u00f0"+ - "\t\3\2\2\u00f0\u00f1\5\36\20\3\u00f1\u00f2\b\20\1\2\u00f2\u00f7\3\2\2"+ - "\2\u00f3\u00f4\5 \21\2\u00f4\u00f5\b\20\1\2\u00f5\u00f7\3\2\2\2\u00f6"+ - "\u00ed\3\2\2\2\u00f6\u00f3\3\2\2\2\u00f7\u0138\3\2\2\2\u00f8\u00f9\f\17"+ - "\2\2\u00f9\u00fa\t\4\2\2\u00fa\u00fb\5\36\20\20\u00fb\u00fc\b\20\1\2\u00fc"+ - "\u0137\3\2\2\2\u00fd\u00fe\f\16\2\2\u00fe\u00ff\t\5\2\2\u00ff\u0100\5"+ - "\36\20\17\u0100\u0101\b\20\1\2\u0101\u0137\3\2\2\2\u0102\u0103\f\r\2\2"+ - "\u0103\u0104\t\6\2\2\u0104\u0105\5\36\20\16\u0105\u0106\b\20\1\2\u0106"+ - "\u0137\3\2\2\2\u0107\u0108\f\f\2\2\u0108\u0109\t\7\2\2\u0109\u010a\5\36"+ - "\20\r\u010a\u010b\b\20\1\2\u010b\u0137\3\2\2\2\u010c\u010d\f\13\2\2\u010d"+ - "\u010e\t\b\2\2\u010e\u010f\5\36\20\f\u010f\u0110\b\20\1\2\u0110\u0137"+ - "\3\2\2\2\u0111\u0112\f\n\2\2\u0112\u0113\t\t\2\2\u0113\u0114\5\36\20\13"+ - "\u0114\u0115\b\20\1\2\u0115\u0137\3\2\2\2\u0116\u0117\f\t\2\2\u0117\u0118"+ - "\7-\2\2\u0118\u0119\5\36\20\n\u0119\u011a\b\20\1\2\u011a\u0137\3\2\2\2"+ - "\u011b\u011c\f\b\2\2\u011c\u011d\7.\2\2\u011d\u011e\5\36\20\t\u011e\u011f"+ - "\b\20\1\2\u011f\u0137\3\2\2\2\u0120\u0121\f\7\2\2\u0121\u0122\7/\2\2\u0122"+ - "\u0123\5\36\20\b\u0123\u0124\b\20\1\2\u0124\u0137\3\2\2\2\u0125\u0126"+ - "\f\6\2\2\u0126\u0127\7\60\2\2\u0127\u0128\5\36\20\7\u0128\u0129\b\20\1"+ - "\2\u0129\u0137\3\2\2\2\u012a\u012b\f\5\2\2\u012b\u012c\7\61\2\2\u012c"+ - "\u012d\5\36\20\6\u012d\u012e\b\20\1\2\u012e\u0137\3\2\2\2\u012f\u0130"+ - "\f\4\2\2\u0130\u0131\7\62\2\2\u0131\u0132\5\36\20\2\u0132\u0133\7\63\2"+ - "\2\u0133\u0134\5\36\20\4\u0134\u0135\b\20\1\2\u0135\u0137\3\2\2\2\u0136"+ - "\u00f8\3\2\2\2\u0136\u00fd\3\2\2\2\u0136\u0102\3\2\2\2\u0136\u0107\3\2"+ - "\2\2\u0136\u010c\3\2\2\2\u0136\u0111\3\2\2\2\u0136\u0116\3\2\2\2\u0136"+ - "\u011b\3\2\2\2\u0136\u0120\3\2\2\2\u0136\u0125\3\2\2\2\u0136\u012a\3\2"+ - "\2\2\u0136\u012f\3\2\2\2\u0137\u013a\3\2\2\2\u0138\u0136\3\2\2\2\u0138"+ - "\u0139\3\2\2\2\u0139\37\3\2\2\2\u013a\u0138\3\2\2\2\u013b\u013c\6\21\17"+ - "\3\u013c\u013d\t\n\2\2\u013d\u0161\5\"\22\2\u013e\u013f\6\21\20\3\u013f"+ - "\u0140\5\"\22\2\u0140\u0141\t\n\2\2\u0141\u0161\3\2\2\2\u0142\u0143\6"+ - "\21\21\3\u0143\u0161\5\"\22\2\u0144\u0145\6\21\22\3\u0145\u0146\t\13\2"+ - "\2\u0146\u0161\b\21\1\2\u0147\u0148\6\21\23\3\u0148\u0149\7L\2\2\u0149"+ - "\u0161\b\21\1\2\u014a\u014b\6\21\24\3\u014b\u014c\7M\2\2\u014c\u0161\b"+ - "\21\1\2\u014d\u014e\6\21\25\3\u014e\u014f\7N\2\2\u014f\u0161\b\21\1\2"+ - "\u0150\u0151\6\21\26\3\u0151\u0152\5@!\2\u0152\u0153\b\21\1\2\u0153\u0161"+ - "\3\2\2\2\u0154\u0155\6\21\27\3\u0155\u0156\5B\"\2\u0156\u0157\b\21\1\2"+ - "\u0157\u0161\3\2\2\2\u0158\u0159\6\21\30\3\u0159\u015a\t\f\2\2\u015a\u0161"+ - "\5 \21\2\u015b\u015c\7\t\2\2\u015c\u015d\5\26\f\2\u015d\u015e\7\n\2\2"+ - "\u015e\u015f\5 \21\2\u015f\u0161\3\2\2\2\u0160\u013b\3\2\2\2\u0160\u013e"+ - "\3\2\2\2\u0160\u0142\3\2\2\2\u0160\u0144\3\2\2\2\u0160\u0147\3\2\2\2\u0160"+ - "\u014a\3\2\2\2\u0160\u014d\3\2\2\2\u0160\u0150\3\2\2\2\u0160\u0154\3\2"+ - "\2\2\u0160\u0158\3\2\2\2\u0160\u015b\3\2\2\2\u0161!\3\2\2\2\u0162\u0166"+ - "\5$\23\2\u0163\u0165\5&\24\2\u0164\u0163\3\2\2\2\u0165\u0168\3\2\2\2\u0166"+ - "\u0164\3\2\2\2\u0166\u0167\3\2\2\2\u0167\u0173\3\2\2\2\u0168\u0166\3\2"+ - "\2\2\u0169\u016a\5\26\f\2\u016a\u016e\5(\25\2\u016b\u016d\5&\24\2\u016c"+ - "\u016b\3\2\2\2\u016d\u0170\3\2\2\2\u016e\u016c\3\2\2\2\u016e\u016f\3\2"+ - 
"\2\2\u016f\u0173\3\2\2\2\u0170\u016e\3\2\2\2\u0171\u0173\5> \2\u0172\u0162"+ - "\3\2\2\2\u0172\u0169\3\2\2\2\u0172\u0171\3\2\2\2\u0173#\3\2\2\2\u0174"+ - "\u0175\6\23\31\3\u0175\u0176\7\t\2\2\u0176\u0177\5\36\20\2\u0177\u0178"+ - "\7\n\2\2\u0178\u0179\b\23\1\2\u0179\u0188\3\2\2\2\u017a\u017b\6\23\32"+ - "\3\u017b\u017c\7\t\2\2\u017c\u017d\5 \21\2\u017d\u017e\7\n\2\2\u017e\u0188"+ - "\3\2\2\2\u017f\u0188\7J\2\2\u0180\u0188\7K\2\2\u0181\u0188\7P\2\2\u0182"+ - "\u0183\7P\2\2\u0183\u0188\5,\27\2\u0184\u0185\7\26\2\2\u0185\u0186\7O"+ - "\2\2\u0186\u0188\5,\27\2\u0187\u0174\3\2\2\2\u0187\u017a\3\2\2\2\u0187"+ - "\u017f\3\2\2\2\u0187\u0180\3\2\2\2\u0187\u0181\3\2\2\2\u0187\u0182\3\2"+ - "\2\2\u0187\u0184\3\2\2\2\u0188%\3\2\2\2\u0189\u018a\6\24\33\3\u018a\u018e"+ - "\5(\25\2\u018b\u018c\6\24\34\3\u018c\u018e\5*\26\2\u018d\u0189\3\2\2\2"+ - "\u018d\u018b\3\2\2\2\u018e\'\3\2\2\2\u018f\u0190\7\13\2\2\u0190\u0191"+ - "\7R\2\2\u0191\u0195\5,\27\2\u0192\u0193\7\13\2\2\u0193\u0195\t\r\2\2\u0194"+ - "\u018f\3\2\2\2\u0194\u0192\3\2\2\2\u0195)\3\2\2\2\u0196\u0197\7\7\2\2"+ - "\u0197\u0198\5\36\20\2\u0198\u0199\7\b\2\2\u0199+\3\2\2\2\u019a\u01a3"+ - "\7\t\2\2\u019b\u01a0\5.\30\2\u019c\u019d\7\f\2\2\u019d\u019f\5.\30\2\u019e"+ - "\u019c\3\2\2\2\u019f\u01a2\3\2\2\2\u01a0\u019e\3\2\2\2\u01a0\u01a1\3\2"+ - "\2\2\u01a1\u01a4\3\2\2\2\u01a2\u01a0\3\2\2\2\u01a3\u019b\3\2\2\2\u01a3"+ - "\u01a4\3\2\2\2\u01a4\u01a5\3\2\2\2\u01a5\u01a6\7\n\2\2\u01a6-\3\2\2\2"+ - "\u01a7\u01ab\5\36\20\2\u01a8\u01ab\5\60\31\2\u01a9\u01ab\5\64\33\2\u01aa"+ - "\u01a7\3\2\2\2\u01aa\u01a8\3\2\2\2\u01aa\u01a9\3\2\2\2\u01ab/\3\2\2\2"+ - "\u01ac\u01ba\5\62\32\2\u01ad\u01b6\7\t\2\2\u01ae\u01b3\5\62\32\2\u01af"+ - "\u01b0\7\f\2\2\u01b0\u01b2\5\62\32\2\u01b1\u01af\3\2\2\2\u01b2\u01b5\3"+ - "\2\2\2\u01b3\u01b1\3\2\2\2\u01b3\u01b4\3\2\2\2\u01b4\u01b7\3\2\2\2\u01b5"+ - "\u01b3\3\2\2\2\u01b6\u01ae\3\2\2\2\u01b6\u01b7\3\2\2\2\u01b7\u01b8\3\2"+ - "\2\2\u01b8\u01ba\7\n\2\2\u01b9\u01ac\3\2\2\2\u01b9\u01ad\3\2\2\2\u01ba"+ - "\u01bb\3\2\2\2\u01bb\u01be\7\65\2\2\u01bc\u01bf\5\f\7\2\u01bd\u01bf\5"+ - "\36\20\2\u01be\u01bc\3\2\2\2\u01be\u01bd\3\2\2\2\u01bf\61\3\2\2\2\u01c0"+ - "\u01c2\5\26\f\2\u01c1\u01c0\3\2\2\2\u01c1\u01c2\3\2\2\2\u01c2\u01c3\3"+ - "\2\2\2\u01c3\u01c4\7P\2\2\u01c4\63\3\2\2\2\u01c5\u01ca\5\66\34\2\u01c6"+ - "\u01ca\58\35\2\u01c7\u01ca\5:\36\2\u01c8\u01ca\5<\37\2\u01c9\u01c5\3\2"+ - "\2\2\u01c9\u01c6\3\2\2\2\u01c9\u01c7\3\2\2\2\u01c9\u01c8\3\2\2\2\u01ca"+ - "\65\3\2\2\2\u01cb\u01cc\7O\2\2\u01cc\u01cd\7\64\2\2\u01cd\u01ce\7P\2\2"+ - "\u01ce\67\3\2\2\2\u01cf\u01d0\5\26\f\2\u01d0\u01d1\7\64\2\2\u01d1\u01d2"+ - "\7\26\2\2\u01d29\3\2\2\2\u01d3\u01d4\7P\2\2\u01d4\u01d5\7\64\2\2\u01d5"+ - "\u01d6\7P\2\2\u01d6;\3\2\2\2\u01d7\u01d8\7\32\2\2\u01d8\u01d9\7\64\2\2"+ - "\u01d9\u01da\7P\2\2\u01da=\3\2\2\2\u01db\u01dc\7\26\2\2\u01dc\u01e1\7"+ - "O\2\2\u01dd\u01de\7\7\2\2\u01de\u01df\5\36\20\2\u01df\u01e0\7\b\2\2\u01e0"+ - "\u01e2\3\2\2\2\u01e1\u01dd\3\2\2\2\u01e2\u01e3\3\2\2\2\u01e3\u01e1\3\2"+ - "\2\2\u01e3\u01e4\3\2\2\2\u01e4\u01ec\3\2\2\2\u01e5\u01e9\5(\25\2\u01e6"+ - "\u01e8\5&\24\2\u01e7\u01e6\3\2\2\2\u01e8\u01eb\3\2\2\2\u01e9\u01e7\3\2"+ - "\2\2\u01e9\u01ea\3\2\2\2\u01ea\u01ed\3\2\2\2\u01eb\u01e9\3\2\2\2\u01ec"+ - "\u01e5\3\2\2\2\u01ec\u01ed\3\2\2\2\u01ed\u0202\3\2\2\2\u01ee\u01ef\7\26"+ - "\2\2\u01ef\u01f0\7O\2\2\u01f0\u01f1\7\7\2\2\u01f1\u01f2\7\b\2\2\u01f2"+ - "\u01fb\7\5\2\2\u01f3\u01f8\5\36\20\2\u01f4\u01f5\7\f\2\2\u01f5\u01f7\5"+ - "\36\20\2\u01f6\u01f4\3\2\2\2\u01f7\u01fa\3\2\2\2\u01f8\u01f6\3\2\2\2\u01f8"+ - 
"\u01f9\3\2\2\2\u01f9\u01fc\3\2\2\2\u01fa\u01f8\3\2\2\2\u01fb\u01f3\3\2"+ - "\2\2\u01fb\u01fc\3\2\2\2\u01fc\u01fe\3\2\2\2\u01fd\u01ff\7\r\2\2\u01fe"+ - "\u01fd\3\2\2\2\u01fe\u01ff\3\2\2\2\u01ff\u0200\3\2\2\2\u0200\u0202\7\6"+ - "\2\2\u0201\u01db\3\2\2\2\u0201\u01ee\3\2\2\2\u0202?\3\2\2\2\u0203\u0204"+ - "\7\7\2\2\u0204\u0209\5\36\20\2\u0205\u0206\7\f\2\2\u0206\u0208\5\36\20"+ - "\2\u0207\u0205\3\2\2\2\u0208\u020b\3\2\2\2\u0209\u0207\3\2\2\2\u0209\u020a"+ - "\3\2\2\2\u020a\u020c\3\2\2\2\u020b\u0209\3\2\2\2\u020c\u020d\7\b\2\2\u020d"+ - "\u0211\3\2\2\2\u020e\u020f\7\7\2\2\u020f\u0211\7\b\2\2\u0210\u0203\3\2"+ - "\2\2\u0210\u020e\3\2\2\2\u0211A\3\2\2\2\u0212\u0213\7\7\2\2\u0213\u0218"+ - "\5D#\2\u0214\u0215\7\f\2\2\u0215\u0217\5D#\2\u0216\u0214\3\2\2\2\u0217"+ - "\u021a\3\2\2\2\u0218\u0216\3\2\2\2\u0218\u0219\3\2\2\2\u0219\u021b\3\2"+ - "\2\2\u021a\u0218\3\2\2\2\u021b\u021c\7\b\2\2\u021c\u0221\3\2\2\2\u021d"+ - "\u021e\7\7\2\2\u021e\u021f\7\63\2\2\u021f\u0221\7\b\2\2\u0220\u0212\3"+ - "\2\2\2\u0220\u021d\3\2\2\2\u0221C\3\2\2\2\u0222\u0223\5\36\20\2\u0223"+ - "\u0224\7\63\2\2\u0224\u0225\5\36\20\2\u0225E\3\2\2\2\62IObeqy\u0086\u008a"+ - "\u008e\u0093\u00ae\u00b7\u00bb\u00c1\u00ca\u00d4\u00dc\u00e2\u00f6\u0136"+ - "\u0138\u0160\u0166\u016e\u0172\u0187\u018d\u0194\u01a0\u01a3\u01aa\u01b3"+ - "\u01b6\u01b9\u01be\u01c1\u01c9\u01e3\u01e9\u01ec\u01f8\u01fb\u01fe\u0201"+ - "\u0209\u0210\u0218\u0220"; + "\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\7\20\u0144"+ + "\n\20\f\20\16\20\u0147\13\20\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3"+ + "\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3"+ + "\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3"+ + "\21\5\21\u016e\n\21\3\22\3\22\7\22\u0172\n\22\f\22\16\22\u0175\13\22\3"+ + "\22\3\22\3\22\7\22\u017a\n\22\f\22\16\22\u017d\13\22\3\22\5\22\u0180\n"+ + "\22\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3"+ + "\23\3\23\3\23\3\23\3\23\3\23\5\23\u0195\n\23\3\24\3\24\3\24\3\24\5\24"+ + "\u019b\n\24\3\25\3\25\3\25\3\25\3\25\5\25\u01a2\n\25\3\26\3\26\3\26\3"+ + "\26\3\27\3\27\3\27\3\27\7\27\u01ac\n\27\f\27\16\27\u01af\13\27\5\27\u01b1"+ + "\n\27\3\27\3\27\3\30\3\30\3\30\5\30\u01b8\n\30\3\31\3\31\3\31\3\31\3\31"+ + "\7\31\u01bf\n\31\f\31\16\31\u01c2\13\31\5\31\u01c4\n\31\3\31\5\31\u01c7"+ + "\n\31\3\31\3\31\3\31\5\31\u01cc\n\31\3\32\5\32\u01cf\n\32\3\32\3\32\3"+ + "\33\3\33\3\33\3\33\5\33\u01d7\n\33\3\34\3\34\3\34\3\34\3\35\3\35\3\35"+ + "\3\35\3\36\3\36\3\36\3\36\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \6 \u01ef"+ + "\n \r \16 \u01f0\3 \3 \7 \u01f5\n \f \16 \u01f8\13 \5 \u01fa\n \3 \3 "+ + "\3 \3 \3 \3 \3 \3 \7 \u0204\n \f \16 \u0207\13 \5 \u0209\n \3 \5 \u020c"+ + "\n \3 \5 \u020f\n \3!\3!\3!\3!\7!\u0215\n!\f!\16!\u0218\13!\3!\3!\3!\3"+ + "!\5!\u021e\n!\3\"\3\"\3\"\3\"\7\"\u0224\n\"\f\"\16\"\u0227\13\"\3\"\3"+ + "\"\3\"\3\"\3\"\5\"\u022e\n\"\3#\3#\3#\3#\3#\2\3\36$\2\4\6\b\n\f\16\20"+ + "\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BD\2\16\3\3\r\r\3\2\u020e\3\2\2\2@"+ + "\u021d\3\2\2\2B\u022d\3\2\2\2D\u022f\3\2\2\2FH\5\4\3\2GF\3\2\2\2HK\3\2"+ + "\2\2IG\3\2\2\2IJ\3\2\2\2JO\3\2\2\2KI\3\2\2\2LN\5\b\5\2ML\3\2\2\2NQ\3\2"+ + "\2\2OM\3\2\2\2OP\3\2\2\2PR\3\2\2\2QO\3\2\2\2RS\7\2\2\3S\3\3\2\2\2TU\5"+ + "\26\f\2UV\7R\2\2VW\5\6\4\2WX\5\f\7\2X\5\3\2\2\2Ye\7\t\2\2Z[\5\26\f\2["+ + "b\7R\2\2\\]\7\f\2\2]^\5\26\f\2^_\7R\2\2_a\3\2\2\2`\\\3\2\2\2ad\3\2\2\2"+ + "b`\3\2\2\2bc\3\2\2\2cf\3\2\2\2db\3\2\2\2eZ\3\2\2\2ef\3\2\2\2fg\3\2\2\2"+ + "gh\7\n\2\2h\7\3\2\2\2ij\7\16\2\2jk\7\t\2\2kl\5\36\20\2lm\7\n\2\2mq\5\n"+ + 
"\6\2no\7\20\2\2or\5\n\6\2pr\6\5\2\2qn\3\2\2\2qp\3\2\2\2r\u00c0\3\2\2\2"+ + "st\7\21\2\2tu\7\t\2\2uv\5\36\20\2vy\7\n\2\2wz\5\n\6\2xz\5\16\b\2yw\3\2"+ + "\2\2yx\3\2\2\2z\u00c0\3\2\2\2{|\7\22\2\2|}\5\f\7\2}~\7\21\2\2~\177\7\t"+ + "\2\2\177\u0080\5\36\20\2\u0080\u0081\7\n\2\2\u0081\u0082\5\34\17\2\u0082"+ + "\u00c0\3\2\2\2\u0083\u0084\7\23\2\2\u0084\u0086\7\t\2\2\u0085\u0087\5"+ + "\20\t\2\u0086\u0085\3\2\2\2\u0086\u0087\3\2\2\2\u0087\u0088\3\2\2\2\u0088"+ + "\u008a\7\r\2\2\u0089\u008b\5\36\20\2\u008a\u0089\3\2\2\2\u008a\u008b\3"+ + "\2\2\2\u008b\u008c\3\2\2\2\u008c\u008e\7\r\2\2\u008d\u008f\5\22\n\2\u008e"+ + "\u008d\3\2\2\2\u008e\u008f\3\2\2\2\u008f\u0090\3\2\2\2\u0090\u0093\7\n"+ + "\2\2\u0091\u0094\5\n\6\2\u0092\u0094\5\16\b\2\u0093\u0091\3\2\2\2\u0093"+ + "\u0092\3\2\2\2\u0094\u00c0\3\2\2\2\u0095\u0096\7\23\2\2\u0096\u0097\7"+ + "\t\2\2\u0097\u0098\5\26\f\2\u0098\u0099\7R\2\2\u0099\u009a\7\65\2\2\u009a"+ + "\u009b\5\36\20\2\u009b\u009c\7\n\2\2\u009c\u009d\5\n\6\2\u009d\u00c0\3"+ + "\2\2\2\u009e\u009f\7\23\2\2\u009f\u00a0\7\t\2\2\u00a0\u00a1\7R\2\2\u00a1"+ + "\u00a2\7\17\2\2\u00a2\u00a3\5\36\20\2\u00a3\u00a4\7\n\2\2\u00a4\u00a5"+ + "\5\n\6\2\u00a5\u00c0\3\2\2\2\u00a6\u00a7\5\24\13\2\u00a7\u00a8\5\34\17"+ + "\2\u00a8\u00c0\3\2\2\2\u00a9\u00aa\7\24\2\2\u00aa\u00c0\5\34\17\2\u00ab"+ + "\u00ac\7\25\2\2\u00ac\u00c0\5\34\17\2\u00ad\u00ae\7\26\2\2\u00ae\u00af"+ + "\5\36\20\2\u00af\u00b0\5\34\17\2\u00b0\u00c0\3\2\2\2\u00b1\u00b2\7\30"+ + "\2\2\u00b2\u00b4\5\f\7\2\u00b3\u00b5\5\32\16\2\u00b4\u00b3\3\2\2\2\u00b5"+ + "\u00b6\3\2\2\2\u00b6\u00b4\3\2\2\2\u00b6\u00b7\3\2\2\2\u00b7\u00c0\3\2"+ + "\2\2\u00b8\u00b9\7\32\2\2\u00b9\u00ba\5\36\20\2\u00ba\u00bb\5\34\17\2"+ + "\u00bb\u00c0\3\2\2\2\u00bc\u00bd\5\36\20\2\u00bd\u00be\5\34\17\2\u00be"+ + "\u00c0\3\2\2\2\u00bfi\3\2\2\2\u00bfs\3\2\2\2\u00bf{\3\2\2\2\u00bf\u0083"+ + "\3\2\2\2\u00bf\u0095\3\2\2\2\u00bf\u009e\3\2\2\2\u00bf\u00a6\3\2\2\2\u00bf"+ + "\u00a9\3\2\2\2\u00bf\u00ab\3\2\2\2\u00bf\u00ad\3\2\2\2\u00bf\u00b1\3\2"+ + "\2\2\u00bf\u00b8\3\2\2\2\u00bf\u00bc\3\2\2\2\u00c0\t\3\2\2\2\u00c1\u00c4"+ + "\5\f\7\2\u00c2\u00c4\5\b\5\2\u00c3\u00c1\3\2\2\2\u00c3\u00c2\3\2\2\2\u00c4"+ + "\13\3\2\2\2\u00c5\u00c9\7\5\2\2\u00c6\u00c8\5\b\5\2\u00c7\u00c6\3\2\2"+ + "\2\u00c8\u00cb\3\2\2\2\u00c9\u00c7\3\2\2\2\u00c9\u00ca\3\2\2\2\u00ca\u00cc"+ + "\3\2\2\2\u00cb\u00c9\3\2\2\2\u00cc\u00cd\7\6\2\2\u00cd\r\3\2\2\2\u00ce"+ + "\u00cf\7\r\2\2\u00cf\17\3\2\2\2\u00d0\u00d3\5\24\13\2\u00d1\u00d3\5\36"+ + "\20\2\u00d2\u00d0\3\2\2\2\u00d2\u00d1\3\2\2\2\u00d3\21\3\2\2\2\u00d4\u00d5"+ + "\5\36\20\2\u00d5\23\3\2\2\2\u00d6\u00d7\5\26\f\2\u00d7\u00dc\5\30\r\2"+ + "\u00d8\u00d9\7\f\2\2\u00d9\u00db\5\30\r\2\u00da\u00d8\3\2\2\2\u00db\u00de"+ + "\3\2\2\2\u00dc\u00da\3\2\2\2\u00dc\u00dd\3\2\2\2\u00dd\25\3\2\2\2\u00de"+ + "\u00dc\3\2\2\2\u00df\u00e4\7Q\2\2\u00e0\u00e1\7\7\2\2\u00e1\u00e3\7\b"+ + "\2\2\u00e2\u00e0\3\2\2\2\u00e3\u00e6\3\2\2\2\u00e4\u00e2\3\2\2\2\u00e4"+ + "\u00e5\3\2\2\2\u00e5\27\3\2\2\2\u00e6\u00e4\3\2\2\2\u00e7\u00ea\7R\2\2"+ + "\u00e8\u00e9\7<\2\2\u00e9\u00eb\5\36\20\2\u00ea\u00e8\3\2\2\2\u00ea\u00eb"+ + "\3\2\2\2\u00eb\31\3\2\2\2\u00ec\u00ed\7\31\2\2\u00ed\u00ee\7\t\2\2\u00ee"+ + "\u00ef\7Q\2\2\u00ef\u00f0\7R\2\2\u00f0\u00f1\7\n\2\2\u00f1\u00f2\5\f\7"+ + "\2\u00f2\33\3\2\2\2\u00f3\u00f4\t\2\2\2\u00f4\35\3\2\2\2\u00f5\u00f6\b"+ + "\20\1\2\u00f6\u00f7\5\"\22\2\u00f7\u00f8\t\3\2\2\u00f8\u00f9\5\36\20\3"+ + "\u00f9\u00fa\b\20\1\2\u00fa\u00ff\3\2\2\2\u00fb\u00fc\5 \21\2\u00fc\u00fd"+ + "\b\20\1\2\u00fd\u00ff\3\2\2\2\u00fe\u00f5\3\2\2\2\u00fe\u00fb\3\2\2\2"+ + 
"\u00ff\u0145\3\2\2\2\u0100\u0101\f\20\2\2\u0101\u0102\t\4\2\2\u0102\u0103"+ + "\5\36\20\21\u0103\u0104\b\20\1\2\u0104\u0144\3\2\2\2\u0105\u0106\f\17"+ + "\2\2\u0106\u0107\t\5\2\2\u0107\u0108\5\36\20\20\u0108\u0109\b\20\1\2\u0109"+ + "\u0144\3\2\2\2\u010a\u010b\f\16\2\2\u010b\u010c\t\6\2\2\u010c\u010d\5"+ + "\36\20\17\u010d\u010e\b\20\1\2\u010e\u0144\3\2\2\2\u010f\u0110\f\r\2\2"+ + "\u0110\u0111\t\7\2\2\u0111\u0112\5\36\20\16\u0112\u0113\b\20\1\2\u0113"+ + "\u0144\3\2\2\2\u0114\u0115\f\f\2\2\u0115\u0116\t\b\2\2\u0116\u0117\5\36"+ + "\20\r\u0117\u0118\b\20\1\2\u0118\u0144\3\2\2\2\u0119\u011a\f\n\2\2\u011a"+ + "\u011b\t\t\2\2\u011b\u011c\5\36\20\13\u011c\u011d\b\20\1\2\u011d\u0144"+ + "\3\2\2\2\u011e\u011f\f\t\2\2\u011f\u0120\7/\2\2\u0120\u0121\5\36\20\n"+ + "\u0121\u0122\b\20\1\2\u0122\u0144\3\2\2\2\u0123\u0124\f\b\2\2\u0124\u0125"+ + "\7\60\2\2\u0125\u0126\5\36\20\t\u0126\u0127\b\20\1\2\u0127\u0144\3\2\2"+ + "\2\u0128\u0129\f\7\2\2\u0129\u012a\7\61\2\2\u012a\u012b\5\36\20\b\u012b"+ + "\u012c\b\20\1\2\u012c\u0144\3\2\2\2\u012d\u012e\f\6\2\2\u012e\u012f\7"+ + "\62\2\2\u012f\u0130\5\36\20\7\u0130\u0131\b\20\1\2\u0131\u0144\3\2\2\2"+ + "\u0132\u0133\f\5\2\2\u0133\u0134\7\63\2\2\u0134\u0135\5\36\20\6\u0135"+ + "\u0136\b\20\1\2\u0136\u0144\3\2\2\2\u0137\u0138\f\4\2\2\u0138\u0139\7"+ + "\64\2\2\u0139\u013a\5\36\20\2\u013a\u013b\7\65\2\2\u013b\u013c\5\36\20"+ + "\4\u013c\u013d\b\20\1\2\u013d\u0144\3\2\2\2\u013e\u013f\f\13\2\2\u013f"+ + "\u0140\7\34\2\2\u0140\u0141\5\26\f\2\u0141\u0142\b\20\1\2\u0142\u0144"+ + "\3\2\2\2\u0143\u0100\3\2\2\2\u0143\u0105\3\2\2\2\u0143\u010a\3\2\2\2\u0143"+ + "\u010f\3\2\2\2\u0143\u0114\3\2\2\2\u0143\u0119\3\2\2\2\u0143\u011e\3\2"+ + "\2\2\u0143\u0123\3\2\2\2\u0143\u0128\3\2\2\2\u0143\u012d\3\2\2\2\u0143"+ + "\u0132\3\2\2\2\u0143\u0137\3\2\2\2\u0143\u013e\3\2\2\2\u0144\u0147\3\2"+ + "\2\2\u0145\u0143\3\2\2\2\u0145\u0146\3\2\2\2\u0146\37\3\2\2\2\u0147\u0145"+ + "\3\2\2\2\u0148\u0149\6\21\20\3\u0149\u014a\t\n\2\2\u014a\u016e\5\"\22"+ + "\2\u014b\u014c\6\21\21\3\u014c\u014d\5\"\22\2\u014d\u014e\t\n\2\2\u014e"+ + "\u016e\3\2\2\2\u014f\u0150\6\21\22\3\u0150\u016e\5\"\22\2\u0151\u0152"+ + "\6\21\23\3\u0152\u0153\t\13\2\2\u0153\u016e\b\21\1\2\u0154\u0155\6\21"+ + "\24\3\u0155\u0156\7N\2\2\u0156\u016e\b\21\1\2\u0157\u0158\6\21\25\3\u0158"+ + "\u0159\7O\2\2\u0159\u016e\b\21\1\2\u015a\u015b\6\21\26\3\u015b\u015c\7"+ + "P\2\2\u015c\u016e\b\21\1\2\u015d\u015e\6\21\27\3\u015e\u015f\5@!\2\u015f"+ + "\u0160\b\21\1\2\u0160\u016e\3\2\2\2\u0161\u0162\6\21\30\3\u0162\u0163"+ + "\5B\"\2\u0163\u0164\b\21\1\2\u0164\u016e\3\2\2\2\u0165\u0166\6\21\31\3"+ + "\u0166\u0167\t\f\2\2\u0167\u016e\5 \21\2\u0168\u0169\7\t\2\2\u0169\u016a"+ + "\5\26\f\2\u016a\u016b\7\n\2\2\u016b\u016c\5 \21\2\u016c\u016e\3\2\2\2"+ + "\u016d\u0148\3\2\2\2\u016d\u014b\3\2\2\2\u016d\u014f\3\2\2\2\u016d\u0151"+ + "\3\2\2\2\u016d\u0154\3\2\2\2\u016d\u0157\3\2\2\2\u016d\u015a\3\2\2\2\u016d"+ + "\u015d\3\2\2\2\u016d\u0161\3\2\2\2\u016d\u0165\3\2\2\2\u016d\u0168\3\2"+ + "\2\2\u016e!\3\2\2\2\u016f\u0173\5$\23\2\u0170\u0172\5&\24\2\u0171\u0170"+ + "\3\2\2\2\u0172\u0175\3\2\2\2\u0173\u0171\3\2\2\2\u0173\u0174\3\2\2\2\u0174"+ + "\u0180\3\2\2\2\u0175\u0173\3\2\2\2\u0176\u0177\5\26\f\2\u0177\u017b\5"+ + "(\25\2\u0178\u017a\5&\24\2\u0179\u0178\3\2\2\2\u017a\u017d\3\2\2\2\u017b"+ + "\u0179\3\2\2\2\u017b\u017c\3\2\2\2\u017c\u0180\3\2\2\2\u017d\u017b\3\2"+ + "\2\2\u017e\u0180\5> \2\u017f\u016f\3\2\2\2\u017f\u0176\3\2\2\2\u017f\u017e"+ + "\3\2\2\2\u0180#\3\2\2\2\u0181\u0182\6\23\32\3\u0182\u0183\7\t\2\2\u0183"+ + 
"\u0184\5\36\20\2\u0184\u0185\7\n\2\2\u0185\u0186\b\23\1\2\u0186\u0195"+ + "\3\2\2\2\u0187\u0188\6\23\33\3\u0188\u0189\7\t\2\2\u0189\u018a\5 \21\2"+ + "\u018a\u018b\7\n\2\2\u018b\u0195\3\2\2\2\u018c\u0195\7L\2\2\u018d\u0195"+ + "\7M\2\2\u018e\u0195\7R\2\2\u018f\u0190\7R\2\2\u0190\u0195\5,\27\2\u0191"+ + "\u0192\7\27\2\2\u0192\u0193\7Q\2\2\u0193\u0195\5,\27\2\u0194\u0181\3\2"+ + "\2\2\u0194\u0187\3\2\2\2\u0194\u018c\3\2\2\2\u0194\u018d\3\2\2\2\u0194"+ + "\u018e\3\2\2\2\u0194\u018f\3\2\2\2\u0194\u0191\3\2\2\2\u0195%\3\2\2\2"+ + "\u0196\u0197\6\24\34\3\u0197\u019b\5(\25\2\u0198\u0199\6\24\35\3\u0199"+ + "\u019b\5*\26\2\u019a\u0196\3\2\2\2\u019a\u0198\3\2\2\2\u019b\'\3\2\2\2"+ + "\u019c\u019d\7\13\2\2\u019d\u019e\7T\2\2\u019e\u01a2\5,\27\2\u019f\u01a0"+ + "\7\13\2\2\u01a0\u01a2\t\r\2\2\u01a1\u019c\3\2\2\2\u01a1\u019f\3\2\2\2"+ + "\u01a2)\3\2\2\2\u01a3\u01a4\7\7\2\2\u01a4\u01a5\5\36\20\2\u01a5\u01a6"+ + "\7\b\2\2\u01a6+\3\2\2\2\u01a7\u01b0\7\t\2\2\u01a8\u01ad\5.\30\2\u01a9"+ + "\u01aa\7\f\2\2\u01aa\u01ac\5.\30\2\u01ab\u01a9\3\2\2\2\u01ac\u01af\3\2"+ + "\2\2\u01ad\u01ab\3\2\2\2\u01ad\u01ae\3\2\2\2\u01ae\u01b1\3\2\2\2\u01af"+ + "\u01ad\3\2\2\2\u01b0\u01a8\3\2\2\2\u01b0\u01b1\3\2\2\2\u01b1\u01b2\3\2"+ + "\2\2\u01b2\u01b3\7\n\2\2\u01b3-\3\2\2\2\u01b4\u01b8\5\36\20\2\u01b5\u01b8"+ + "\5\60\31\2\u01b6\u01b8\5\64\33\2\u01b7\u01b4\3\2\2\2\u01b7\u01b5\3\2\2"+ + "\2\u01b7\u01b6\3\2\2\2\u01b8/\3\2\2\2\u01b9\u01c7\5\62\32\2\u01ba\u01c3"+ + "\7\t\2\2\u01bb\u01c0\5\62\32\2\u01bc\u01bd\7\f\2\2\u01bd\u01bf\5\62\32"+ + "\2\u01be\u01bc\3\2\2\2\u01bf\u01c2\3\2\2\2\u01c0\u01be\3\2\2\2\u01c0\u01c1"+ + "\3\2\2\2\u01c1\u01c4\3\2\2\2\u01c2\u01c0\3\2\2\2\u01c3\u01bb\3\2\2\2\u01c3"+ + "\u01c4\3\2\2\2\u01c4\u01c5\3\2\2\2\u01c5\u01c7\7\n\2\2\u01c6\u01b9\3\2"+ + "\2\2\u01c6\u01ba\3\2\2\2\u01c7\u01c8\3\2\2\2\u01c8\u01cb\7\67\2\2\u01c9"+ + "\u01cc\5\f\7\2\u01ca\u01cc\5\36\20\2\u01cb\u01c9\3\2\2\2\u01cb\u01ca\3"+ + "\2\2\2\u01cc\61\3\2\2\2\u01cd\u01cf\5\26\f\2\u01ce\u01cd\3\2\2\2\u01ce"+ + "\u01cf\3\2\2\2\u01cf\u01d0\3\2\2\2\u01d0\u01d1\7R\2\2\u01d1\63\3\2\2\2"+ + "\u01d2\u01d7\5\66\34\2\u01d3\u01d7\58\35\2\u01d4\u01d7\5:\36\2\u01d5\u01d7"+ + "\5<\37\2\u01d6\u01d2\3\2\2\2\u01d6\u01d3\3\2\2\2\u01d6\u01d4\3\2\2\2\u01d6"+ + "\u01d5\3\2\2\2\u01d7\65\3\2\2\2\u01d8\u01d9\7Q\2\2\u01d9\u01da\7\66\2"+ + "\2\u01da\u01db\7R\2\2\u01db\67\3\2\2\2\u01dc\u01dd\5\26\f\2\u01dd\u01de"+ + "\7\66\2\2\u01de\u01df\7\27\2\2\u01df9\3\2\2\2\u01e0\u01e1\7R\2\2\u01e1"+ + "\u01e2\7\66\2\2\u01e2\u01e3\7R\2\2\u01e3;\3\2\2\2\u01e4\u01e5\7\33\2\2"+ + "\u01e5\u01e6\7\66\2\2\u01e6\u01e7\7R\2\2\u01e7=\3\2\2\2\u01e8\u01e9\7"+ + "\27\2\2\u01e9\u01ee\7Q\2\2\u01ea\u01eb\7\7\2\2\u01eb\u01ec\5\36\20\2\u01ec"+ + "\u01ed\7\b\2\2\u01ed\u01ef\3\2\2\2\u01ee\u01ea\3\2\2\2\u01ef\u01f0\3\2"+ + "\2\2\u01f0\u01ee\3\2\2\2\u01f0\u01f1\3\2\2\2\u01f1\u01f9\3\2\2\2\u01f2"+ + "\u01f6\5(\25\2\u01f3\u01f5\5&\24\2\u01f4\u01f3\3\2\2\2\u01f5\u01f8\3\2"+ + "\2\2\u01f6\u01f4\3\2\2\2\u01f6\u01f7\3\2\2\2\u01f7\u01fa\3\2\2\2\u01f8"+ + "\u01f6\3\2\2\2\u01f9\u01f2\3\2\2\2\u01f9\u01fa\3\2\2\2\u01fa\u020f\3\2"+ + "\2\2\u01fb\u01fc\7\27\2\2\u01fc\u01fd\7Q\2\2\u01fd\u01fe\7\7\2\2\u01fe"+ + "\u01ff\7\b\2\2\u01ff\u0208\7\5\2\2\u0200\u0205\5\36\20\2\u0201\u0202\7"+ + "\f\2\2\u0202\u0204\5\36\20\2\u0203\u0201\3\2\2\2\u0204\u0207\3\2\2\2\u0205"+ + "\u0203\3\2\2\2\u0205\u0206\3\2\2\2\u0206\u0209\3\2\2\2\u0207\u0205\3\2"+ + "\2\2\u0208\u0200\3\2\2\2\u0208\u0209\3\2\2\2\u0209\u020b\3\2\2\2\u020a"+ + "\u020c\7\r\2\2\u020b\u020a\3\2\2\2\u020b\u020c\3\2\2\2\u020c\u020d\3\2"+ + 
"\2\2\u020d\u020f\7\6\2\2\u020e\u01e8\3\2\2\2\u020e\u01fb\3\2\2\2\u020f"+ + "?\3\2\2\2\u0210\u0211\7\7\2\2\u0211\u0216\5\36\20\2\u0212\u0213\7\f\2"+ + "\2\u0213\u0215\5\36\20\2\u0214\u0212\3\2\2\2\u0215\u0218\3\2\2\2\u0216"+ + "\u0214\3\2\2\2\u0216\u0217\3\2\2\2\u0217\u0219\3\2\2\2\u0218\u0216\3\2"+ + "\2\2\u0219\u021a\7\b\2\2\u021a\u021e\3\2\2\2\u021b\u021c\7\7\2\2\u021c"+ + "\u021e\7\b\2\2\u021d\u0210\3\2\2\2\u021d\u021b\3\2\2\2\u021eA\3\2\2\2"+ + "\u021f\u0220\7\7\2\2\u0220\u0225\5D#\2\u0221\u0222\7\f\2\2\u0222\u0224"+ + "\5D#\2\u0223\u0221\3\2\2\2\u0224\u0227\3\2\2\2\u0225\u0223\3\2\2\2\u0225"+ + "\u0226\3\2\2\2\u0226\u0228\3\2\2\2\u0227\u0225\3\2\2\2\u0228\u0229\7\b"+ + "\2\2\u0229\u022e\3\2\2\2\u022a\u022b\7\7\2\2\u022b\u022c\7\65\2\2\u022c"+ + "\u022e\7\b\2\2\u022d\u021f\3\2\2\2\u022d\u022a\3\2\2\2\u022eC\3\2\2\2"+ + "\u022f\u0230\5\36\20\2\u0230\u0231\7\65\2\2\u0231\u0232\5\36\20\2\u0232"+ + "E\3\2\2\2\62IObeqy\u0086\u008a\u008e\u0093\u00b6\u00bf\u00c3\u00c9\u00d2"+ + "\u00dc\u00e4\u00ea\u00fe\u0143\u0145\u016d\u0173\u017b\u017f\u0194\u019a"+ + "\u01a1\u01ad\u01b0\u01b7\u01c0\u01c3\u01c6\u01cb\u01ce\u01d6\u01f0\u01f6"+ + "\u01f9\u0205\u0208\u020b\u020e\u0216\u021d\u0225\u022d"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserBaseVisitor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserBaseVisitor.java index 498324441e6..a1279d611e3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserBaseVisitor.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserBaseVisitor.java @@ -67,6 +67,13 @@ class PainlessParserBaseVisitor extends AbstractParseTreeVisitor implement * {@link #visitChildren} on {@code ctx}.
</p>
*/ @Override public T visitEach(PainlessParser.EachContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>
The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.
</p>
+ */ + @Override public T visitIneach(PainlessParser.IneachContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -228,6 +235,13 @@ class PainlessParserBaseVisitor extends AbstractParseTreeVisitor implement * {@link #visitChildren} on {@code ctx}.
</p>
*/ @Override public T visitBinary(PainlessParser.BinaryContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *
<p>
The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.
</p>
+ */ + @Override public T visitInstanceof(PainlessParser.InstanceofContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserVisitor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserVisitor.java index e57a274f8ef..8a297651070 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserVisitor.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParserVisitor.java @@ -63,6 +63,13 @@ interface PainlessParserVisitor<T> extends ParseTreeVisitor<T> { * @return the visitor result */ T visitEach(PainlessParser.EachContext ctx); + /** + * Visit a parse tree produced by the {@code ineach} + * labeled alternative in {@link PainlessParser#statement}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitIneach(PainlessParser.IneachContext ctx); /** * Visit a parse tree produced by the {@code decl} * labeled alternative in {@link PainlessParser#statement}. @@ -214,6 +221,13 @@ interface PainlessParserVisitor<T> extends ParseTreeVisitor<T> { * @return the visitor result */ T visitBinary(PainlessParser.BinaryContext ctx); + /** + * Visit a parse tree produced by the {@code instanceof} + * labeled alternative in {@link PainlessParser#expression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitInstanceof(PainlessParser.InstanceofContext ctx); /** * Visit a parse tree produced by the {@code pre} * labeled alternative in {@link PainlessParser#unary}. diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java index 20384b57145..55e3445bace 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java @@ -81,7 +81,9 @@ import org.elasticsearch.painless.antlr.PainlessParser.ForContext; import org.elasticsearch.painless.antlr.PainlessParser.FuncrefContext; import org.elasticsearch.painless.antlr.PainlessParser.FunctionContext; import org.elasticsearch.painless.antlr.PainlessParser.IfContext; +import org.elasticsearch.painless.antlr.PainlessParser.IneachContext; import org.elasticsearch.painless.antlr.PainlessParser.InitializerContext; +import org.elasticsearch.painless.antlr.PainlessParser.InstanceofContext; import org.elasticsearch.painless.antlr.PainlessParser.LambdaContext; import org.elasticsearch.painless.antlr.PainlessParser.LamtypeContext; import org.elasticsearch.painless.antlr.PainlessParser.LocalFuncrefContext; @@ -124,6 +126,7 @@ import org.elasticsearch.painless.node.EConditional; import org.elasticsearch.painless.node.EDecimal; import org.elasticsearch.painless.node.EExplicit; import org.elasticsearch.painless.node.EFunctionRef; +import org.elasticsearch.painless.node.EInstanceof; import org.elasticsearch.painless.node.ELambda; import org.elasticsearch.painless.node.ENull; import org.elasticsearch.painless.node.ENumeric; @@ -356,6 +359,17 @@ public final class Walker extends PainlessParserBaseVisitor<Object> { return new SEach(location(ctx), type, name, expression, block); } + + @Override + public Object visitIneach(IneachContext ctx) { + reserved.peek().setMaxLoopCounter(settings.getMaxLoopCounter()); + + String name = ctx.ID().getText(); + AExpression expression = (AExpression)visitExpression(ctx.expression()); + SBlock
block = (SBlock)visit(ctx.trailer()); + + return new SEach(location(ctx), "def", name, expression, block); + } @Override public Object visitDecl(DeclContext ctx) { @@ -1107,6 +1121,13 @@ public final class Walker extends PainlessParserBaseVisitor<Object> { throw location(ctx).createError(new IllegalStateException("Illegal tree structure.")); } + @Override + public Object visitInstanceof(InstanceofContext ctx) { + AExpression expr = (AExpression)visitExpression(ctx.expression()); + String type = ctx.decltype().getText(); + return new EInstanceof(location(ctx), expr, type); + } + /** Returns name of next lambda */ private String nextLambda() { return "lambda$" + syntheticCounter++; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java new file mode 100644 index 00000000000..5f08ff521d8 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java @@ -0,0 +1,89 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.node; + +import org.elasticsearch.painless.Definition; +import org.elasticsearch.painless.Globals; +import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.MethodWriter; + +import java.lang.invoke.MethodType; +import java.util.Objects; +import java.util.Set; + +/** + * Represents instanceof operator. + *
<p>
+ * Unlike Java's, this works for primitive types too. + */ +public class EInstanceof extends AExpression { + AExpression expression; + final String type; + Class resolvedType; + Class expressionType; + boolean primitiveExpression; + + public EInstanceof(Location location, AExpression expression, String type) { + super(location); + this.expression = Objects.requireNonNull(expression); + this.type = Objects.requireNonNull(type); + } + + @Override + void extractVariables(Set variables) { + expression.extractVariables(variables); + } + + @Override + void analyze(Locals locals) { + Definition.Type raw = Definition.getType(type); + // map to wrapped type for primitive types + resolvedType = MethodType.methodType(raw.clazz).wrap().returnType(); + expression.analyze(locals); + actual = Definition.BOOLEAN_TYPE; + + Definition.Type expressionRaw = expression.actual; + if (expressionRaw == null) { + expressionRaw = Definition.DEF_TYPE; + } + // record if the expression returns a primitive + primitiveExpression = expressionRaw.clazz.isPrimitive(); + // map to wrapped type for primitive types + expressionType = MethodType.methodType(expressionRaw.clazz).wrap().returnType(); + } + + @Override + void write(MethodWriter writer, Globals globals) { + // primitive types + if (primitiveExpression) { + // run the expression anyway (who knows what it does) + expression.write(writer, globals); + // discard its result + writer.writePop(expression.actual.type.getSize()); + // push our result: it's a primitive so it cannot be null. + writer.push(resolvedType.isAssignableFrom(expressionType)); + } else { + // ordinary instanceof + expression.write(writer, globals); + writer.instanceOf(org.objectweb.asm.Type.getType(resolvedType)); + } + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java index 46dc8af5ab9..3eee2a7b2d8 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java @@ -158,6 +158,7 @@ public class SFunction extends AStatement { access |= Opcodes.ACC_SYNTHETIC; } final MethodWriter function = new MethodWriter(access, method.method, writer, globals.getStatements(), settings); + function.visitCode(); write(function, globals); function.endMethod(); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java index 950b021486f..e55ad91d492 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java @@ -31,6 +31,7 @@ import org.elasticsearch.painless.node.SFunction.Reserved; import org.elasticsearch.painless.WriterConstants; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.SimpleChecksAdapter; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.ClassWriter; import org.objectweb.asm.Opcodes; @@ -146,7 +147,6 @@ public final class SSource extends AStatement { // Create the ClassWriter.
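// A minimal, self-contained sketch of what these ASM flags do (the class name
// "Example" below is illustrative, not part of this patch): COMPUTE_FRAMES makes
// ASM recompute stack map frames and COMPUTE_MAXS the max stack/local sizes, so
// the method writers below never have to track either by hand.
//   ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS);
//   cw.visit(Opcodes.V1_8, Opcodes.ACC_PUBLIC | Opcodes.ACC_FINAL, "Example", null, "java/lang/Object", null);
//   cw.visitEnd();
//   byte[] bytecode = cw.toByteArray();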
int classFrames = ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS; - int classVersion = Opcodes.V1_8; int classAccess = Opcodes.ACC_PUBLIC | Opcodes.ACC_SUPER | Opcodes.ACC_FINAL; String classBase = BASE_CLASS_TYPE.getInternalName(); String className = CLASS_TYPE.getInternalName(); @@ -155,10 +155,15 @@ public final class SSource extends AStatement { ClassWriter writer = new ClassWriter(classFrames); ClassVisitor visitor = writer; + // if picky is enabled, turn on some checks. instead of VerifyError at the end, you get a helpful stacktrace. + if (settings.isPicky()) { + visitor = new SimpleChecksAdapter(visitor); + } + if (debugStream != null) { visitor = new TraceClassVisitor(visitor, debugStream, null); } - visitor.visit(classVersion, classAccess, className, null, classBase, classInterfaces); + visitor.visit(WriterConstants.CLASS_VERSION, classAccess, className, null, classBase, classInterfaces); visitor.visitSource(Location.computeSourceName(name, source), null); // Write the constructor: @@ -207,6 +212,7 @@ public final class SSource extends AStatement { // Initialize the constants in a static initializer final MethodWriter clinit = new MethodWriter(Opcodes.ACC_STATIC, WriterConstants.CLINIT, visitor, globals.getStatements(), settings); + clinit.visitCode(); for (Constant constant : inits) { constant.initializer.accept(clinit); clinit.putStatic(CLASS_TYPE, constant.name, constant.type); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java index 7111afa59f4..2e3d07ebf92 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java @@ -22,7 +22,7 @@ *
<p>
* The following are the types of nodes: * A* (abstract) - These are the abstract nodes that are the superclasses for the other types. - * I* (interface) -- Thse are marker interfaces to denote a property of the node. + * I* (interface) -- These are marker interfaces to denote a property of the node. * S* (statement) - These are nodes that represent a statement in Painless. These are the highest level nodes. * E* (expression) - These are nodes that represent an expression in Painless. These are the middle level nodes. * L* (link) - These are nodes that represent a piece of a variable/method chain. They are the lowest level nodes. @@ -44,8 +44,9 @@ * {@link org.elasticsearch.painless.node.EDecimal} - Represents a decimal constant. * {@link org.elasticsearch.painless.node.EExplicit} - Represents an explicit cast. * {@link org.elasticsearch.painless.node.EFunctionRef} - Represents a function reference (non-capturing). + * {@link org.elasticsearch.painless.node.EInstanceof} - Represents an instanceof check. * {@link org.elasticsearch.painless.node.EListInit} - Represents a list initialization shortcut. - * {@link org.elasticsearch.painless.node.EMapInit} - Represents a map initializiation shortcut. + * {@link org.elasticsearch.painless.node.EMapInit} - Represents a map initialization shortcut. * {@link org.elasticsearch.painless.node.ENull} - Represents a null constant. * {@link org.elasticsearch.painless.node.ENumeric} - Represents a non-decimal numeric constant. * {@link org.elasticsearch.painless.node.EUnary} - Represents a unary math expression. @@ -92,7 +93,7 @@ * All Painless trees must start with an SSource node at the root. Each node has a constructor that requires * all of its values and children be passed in at the time of instantiation. This means that Painless trees * are built bottom-up; however, this helps enforce tree structure to be correct and fits naturally with a - * standard recurvise-descent parser. + * standard recursive-descent parser. *
<p>
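* (An illustrative example, not from this patch: a script such as {@code return 1 + 2}
* is built as two ENumeric nodes first, then an EBinary wrapping them, and finally an
* SReturn under the SSource root.)
* <p>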
* Generally, statement nodes have member data that evaluate legal control-flow during the analysis phase. * The typical order for statement nodes is for each node to call analyze on it's children during the analysis phase diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.lang.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.lang.txt index 035dc9ba0c8..13f28d3ebeb 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.lang.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.lang.txt @@ -36,6 +36,8 @@ class CharSequence -> java.lang.CharSequence { IntStream chars() IntStream codePoints() int length() + String replaceAll*(Pattern,Function) + String replaceFirst*(Pattern,Function) CharSequence subSequence(int,int) String toString() } @@ -52,12 +54,15 @@ class Iterable -> java.lang.Iterable { Spliterator spliterator() # some adaptations of groovy methods boolean any*(Predicate) + Collection asCollection*() + List asList*() def each*(Consumer) def eachWithIndex*(ObjIntConsumer) boolean every*(Predicate) List findResults*(Function) Map groupBy*(Function) String join*(String) + double sum*() double sum*(ToDoubleFunction) } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AugmentationTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AugmentationTests.java index c0872dd1994..241de3c5db8 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AugmentationTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AugmentationTests.java @@ -58,6 +58,18 @@ public class AugmentationTests extends ScriptTestCase { exec("List l = new ArrayList(); l.add(1); l.any(x -> x == 1)")); } + public void testIterable_AsCollection() { + assertEquals(true, + exec("List l = new ArrayList(); return l.asCollection() === l")); + } + + public void testIterable_AsList() { + assertEquals(true, + exec("List l = new ArrayList(); return l.asList() === l")); + assertEquals(5, + exec("Set l = new HashSet(); l.add(5); return l.asList()[0]")); + } + public void testIterable_Each() { assertEquals(1, exec("List l = new ArrayList(); l.add(1); List l2 = new ArrayList(); l.each(l2::add); return l2.size()")); @@ -88,6 +100,7 @@ public class AugmentationTests extends ScriptTestCase { } public void testIterable_Sum() { + assertEquals(3.0D, exec("def l = [1,2]; return l.sum()")); assertEquals(5.0D, exec("List l = new ArrayList(); l.add(1); l.add(2); l.sum(x -> x + 1)")); } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java index 01f3ee42ae6..e023ac364b3 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java @@ -130,39 +130,65 @@ public class BasicStatementTests extends ScriptTestCase { public void testIterableForEachStatement() { assertEquals(6, exec("List l = new ArrayList(); l.add(1); l.add(2); l.add(3); int total = 0;" + " for (int x : l) total += x; return total")); + assertEquals(6, exec("List l = new ArrayList(); l.add(1); l.add(2); l.add(3); int total = 0;" + + " for (x in l) total += x; return total")); assertEquals("123", exec("List l = new ArrayList(); l.add('1'); l.add('2'); l.add('3'); String cat = '';" + " for (String x : l) cat += x; 
return cat")); + assertEquals("123", exec("List l = new ArrayList(); l.add('1'); l.add('2'); l.add('3'); String cat = '';" + + " for (x in l) cat += x; return cat")); assertEquals("1236", exec("Map m = new HashMap(); m.put('1', 1); m.put('2', 2); m.put('3', 3);" + " String cat = ''; int total = 0;" + " for (Map.Entry e : m.entrySet()) { cat += e.getKey(); total += e.getValue(); } return cat + total")); + assertEquals("1236", exec("Map m = new HashMap(); m.put('1', 1); m.put('2', 2); m.put('3', 3);" + + " String cat = ''; int total = 0;" + + " for (e in m.entrySet()) { cat += e.getKey(); total += e.getValue(); } return cat + total")); } public void testIterableForEachStatementDef() { assertEquals(6, exec("def l = new ArrayList(); l.add(1); l.add(2); l.add(3); int total = 0;" + " for (int x : l) total += x; return total")); + assertEquals(6, exec("def l = new ArrayList(); l.add(1); l.add(2); l.add(3); int total = 0;" + + " for (x in l) total += x; return total")); assertEquals("123", exec("def l = new ArrayList(); l.add('1'); l.add('2'); l.add('3'); String cat = '';" + " for (String x : l) cat += x; return cat")); + assertEquals("123", exec("def l = new ArrayList(); l.add('1'); l.add('2'); l.add('3'); String cat = '';" + + " for (x in l) cat += x; return cat")); assertEquals("1236", exec("def m = new HashMap(); m.put('1', 1); m.put('2', 2); m.put('3', 3);" + " String cat = ''; int total = 0;" + " for (Map.Entry e : m.entrySet()) { cat += e.getKey(); total += e.getValue(); } return cat + total")); + assertEquals("1236", exec("def m = new HashMap(); m.put('1', 1); m.put('2', 2); m.put('3', 3);" + + " String cat = ''; int total = 0;" + + " for (e in m.entrySet()) { cat += e.getKey(); total += e.getValue(); } return cat + total")); } public void testArrayForEachStatement() { assertEquals(6, exec("int[] a = new int[3]; a[0] = 1; a[1] = 2; a[2] = 3; int total = 0;" + " for (int x : a) total += x; return total")); + assertEquals(6, exec("int[] a = new int[3]; a[0] = 1; a[1] = 2; a[2] = 3; int total = 0;" + + " for (x in a) total += x; return total")); assertEquals("123", exec("String[] a = new String[3]; a[0] = '1'; a[1] = '2'; a[2] = '3'; def total = '';" + " for (String x : a) total += x; return total")); + assertEquals("123", exec("String[] a = new String[3]; a[0] = '1'; a[1] = '2'; a[2] = '3'; def total = '';" + + " for (x in a) total += x; return total")); assertEquals(6, exec("int[][] i = new int[3][1]; i[0][0] = 1; i[1][0] = 2; i[2][0] = 3; int total = 0;" + " for (int[] j : i) total += j[0]; return total")); + assertEquals(6, exec("int[][] i = new int[3][1]; i[0][0] = 1; i[1][0] = 2; i[2][0] = 3; int total = 0;" + + " for (j in i) total += j[0]; return total")); } public void testArrayForEachStatementDef() { assertEquals(6, exec("def a = new int[3]; a[0] = 1; a[1] = 2; a[2] = 3; int total = 0;" + " for (int x : a) total += x; return total")); + assertEquals(6, exec("def a = new int[3]; a[0] = 1; a[1] = 2; a[2] = 3; int total = 0;" + + " for (x in a) total += x; return total")); assertEquals("123", exec("def a = new String[3]; a[0] = '1'; a[1] = '2'; a[2] = '3'; def total = '';" + " for (String x : a) total += x; return total")); + assertEquals("123", exec("def a = new String[3]; a[0] = '1'; a[1] = '2'; a[2] = '3'; def total = '';" + + " for (x in a) total += x; return total")); assertEquals(6, exec("def i = new int[3][1]; i[0][0] = 1; i[1][0] = 2; i[2][0] = 3; int total = 0;" + " for (int[] j : i) total += j[0]; return total")); + assertEquals(6, exec("def i = new int[3][1]; i[0][0] = 1; 
i[1][0] = 2; i[2][0] = 3; int total = 0;" + + " for (j in i) total += j[0]; return total")); } public void testDeclarationStatement() { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ComparisonTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ComparisonTests.java index 355a1223273..d9259cf2008 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ComparisonTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ComparisonTests.java @@ -439,4 +439,26 @@ public class ComparisonTests extends ScriptTestCase { assertEquals(true, exec("def x = (float)6; double y = (double)2; return x >= y")); assertEquals(true, exec("def x = (double)7; double y = (double)1; return x >= y")); } + + public void testInstanceOf() { + assertEquals(true, exec("int x = 5; return x instanceof int")); + assertEquals(true, exec("int x = 5; return x instanceof Number")); + assertEquals(true, exec("int x = 5; return x instanceof Integer")); + assertEquals(true, exec("int x = 5; return x instanceof def")); + assertEquals(true, exec("int x = 5; return x instanceof Object")); + assertEquals(true, exec("def x = 5; return x instanceof int")); + assertEquals(true, exec("def x = 5; return x instanceof def")); + assertEquals(true, exec("def x = 5; return x instanceof Object")); + assertEquals(true, exec("def x = 5; return x instanceof Integer")); + assertEquals(true, exec("def x = 5; return x instanceof Number")); + assertEquals(false, exec("def x = 5; return x instanceof float")); + assertEquals(false, exec("def x = 5; return x instanceof Map")); + assertEquals(true, exec("List l = new ArrayList(); return l instanceof List")); + assertEquals(false, exec("List l = null; return l instanceof List")); + assertEquals(true, exec("List l = new ArrayList(); return l instanceof Collection")); + assertEquals(false, exec("List l = new ArrayList(); return l instanceof Map")); + assertEquals(true, exec("int[] x = new int[] { 5 }; return x instanceof int[]")); + assertEquals(false, exec("int[] x = new int[] { 5 }; return x instanceof float[]")); + assertEquals(false, exec("int[] x = new int[] { 5 }; return x instanceof int[][]")); + } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java index dbca5243ec2..6bb800eb92c 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java @@ -79,7 +79,7 @@ public class LambdaTests extends ScriptTestCase { } public void testMultipleStatements() { - assertEquals(2, exec("int applyOne(IntFunction arg) { arg.apply(1) } applyOne(x -> { x = x + 1; return x })")); + assertEquals(2, exec("int applyOne(IntFunction arg) { arg.apply(1) } applyOne(x -> { def y = x + 1; return y })")); } public void testUnneededCurlyStatements() { @@ -138,6 +138,7 @@ public class LambdaTests extends ScriptTestCase { assertTrue(expected.getMessage().contains("is read-only")); } + @AwaitsFix(bugUrl = "def type tracking") public void testOnlyCapturesAreReadOnly() { assertEquals(4, exec("List l = new ArrayList(); l.add(1); l.add(1); " + "return l.stream().mapToInt(x -> { x += 1; return x }).sum();")); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/RegexTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/RegexTests.java index e255a776bed..615dec67dc4 
100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/RegexTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/RegexTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless; +import java.nio.CharBuffer; import java.util.Arrays; import java.util.HashSet; import java.util.regex.Pattern; @@ -175,6 +176,61 @@ public class RegexTests extends ScriptTestCase { assertEquals(Pattern.CANON_EQ | Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE | Pattern.COMMENTS, exec("/./ciux.flags()")); } + public void testReplaceAllMatchesString() { + assertEquals("thE qUIck brOwn fOx", exec("'the quick brown fox'.replaceAll(/[aeiou]/, m -> m.group().toUpperCase(Locale.ROOT))")); + } + + public void testReplaceAllMatchesCharSequence() { + CharSequence charSequence = CharBuffer.wrap("the quick brown fox"); + assertEquals("thE qUIck brOwn fOx", + exec("params.a.replaceAll(/[aeiou]/, m -> m.group().toUpperCase(Locale.ROOT))", singletonMap("a", charSequence))); + } + + public void testReplaceAllNoMatchString() { + assertEquals("i am cat", exec("'i am cat'.replaceAll(/dolphin/, m -> m.group().toUpperCase(Locale.ROOT))")); + } + + public void testReplaceAllNoMatchCharSequence() { + CharSequence charSequence = CharBuffer.wrap("i am cat"); + assertEquals("i am cat", + exec("params.a.replaceAll(/dolphin/, m -> m.group().toUpperCase(Locale.ROOT))", singletonMap("a", charSequence))); + } + + public void testReplaceAllQuoteReplacement() { + assertEquals("th/E q/U/Ick br/Own f/Ox", + exec("'the quick brown fox'.replaceAll(/[aeiou]/, m -> '/' + m.group().toUpperCase(Locale.ROOT))")); + assertEquals("th$E q$U$Ick br$Own f$Ox", + exec("'the quick brown fox'.replaceAll(/[aeiou]/, m -> '$' + m.group().toUpperCase(Locale.ROOT))")); + } + + public void testReplaceFirstMatchesString() { + assertEquals("thE quick brown fox", + exec("'the quick brown fox'.replaceFirst(/[aeiou]/, m -> m.group().toUpperCase(Locale.ROOT))")); + } + + public void testReplaceFirstMatchesCharSequence() { + CharSequence charSequence = CharBuffer.wrap("the quick brown fox"); + assertEquals("thE quick brown fox", + exec("params.a.replaceFirst(/[aeiou]/, m -> m.group().toUpperCase(Locale.ROOT))", singletonMap("a", charSequence))); + } + + public void testReplaceFirstNoMatchString() { + assertEquals("i am cat", exec("'i am cat'.replaceFirst(/dolphin/, m -> m.group().toUpperCase(Locale.ROOT))")); + } + + public void testReplaceFirstNoMatchCharSequence() { + CharSequence charSequence = CharBuffer.wrap("i am cat"); + assertEquals("i am cat", + exec("params.a.replaceFirst(/dolphin/, m -> m.group().toUpperCase(Locale.ROOT))", singletonMap("a", charSequence))); + } + + public void testReplaceFirstQuoteReplacement() { + assertEquals("th/E quick brown fox", + exec("'the quick brown fox'.replaceFirst(/[aeiou]/, m -> '/' + m.group().toUpperCase(Locale.ROOT))")); + assertEquals("th$E quick brown fox", + exec("'the quick brown fox'.replaceFirst(/[aeiou]/, m -> '$' + m.group().toUpperCase(Locale.ROOT))")); + } + public void testCantUsePatternCompile() { IllegalArgumentException e = expectScriptThrows(IllegalArgumentException.class, () -> { exec("Pattern.compile('aa')"); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/ExtractQueryTermsService.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/ExtractQueryTermsService.java index c451a245754..147eae6e4d1 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/ExtractQueryTermsService.java +++ 
b/modules/percolator/src/main/java/org/elasticsearch/percolator/ExtractQueryTermsService.java @@ -37,6 +37,7 @@ import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.spans.SpanFirstQuery; import org.apache.lucene.search.spans.SpanNearQuery; @@ -53,10 +54,13 @@ import org.elasticsearch.index.mapper.ParseContext; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.function.Function; /** * Utility to extract query terms from queries and create queries from documents. @@ -64,64 +68,187 @@ import java.util.Set; public final class ExtractQueryTermsService { private static final byte FIELD_VALUE_SEPARATOR = 0; // nul code point + public static final String EXTRACTION_COMPLETE = "complete"; + public static final String EXTRACTION_PARTIAL = "partial"; + public static final String EXTRACTION_FAILED = "failed"; + + static final Map, Function> queryProcessors; + + static { + Map, Function> map = new HashMap<>(16); + map.put(MatchNoDocsQuery.class, matchNoDocsQuery()); + map.put(ConstantScoreQuery.class, constantScoreQuery()); + map.put(BoostQuery.class, boostQuery()); + map.put(TermQuery.class, termQuery()); + map.put(TermsQuery.class, termsQuery()); + map.put(CommonTermsQuery.class, commonTermsQuery()); + map.put(BlendedTermQuery.class, blendedTermQuery()); + map.put(PhraseQuery.class, phraseQuery()); + map.put(SpanTermQuery.class, spanTermQuery()); + map.put(SpanNearQuery.class, spanNearQuery()); + map.put(SpanOrQuery.class, spanOrQuery()); + map.put(SpanFirstQuery.class, spanFirstQuery()); + map.put(SpanNotQuery.class, spanNotQuery()); + map.put(BooleanQuery.class, booleanQuery()); + map.put(DisjunctionMaxQuery.class, disjunctionMaxQuery()); + map.put(SynonymQuery.class, synonymQuery()); + queryProcessors = Collections.unmodifiableMap(map); + } private ExtractQueryTermsService() { } /** * Extracts all terms from the specified query and adds it to the specified document. + * * @param query The query to extract terms from * @param document The document to add the extracted terms to * @param queryTermsFieldField The field in the document holding the extracted terms - * @param unknownQueryField The field used to mark a document that not all query terms could be extracted. - * For example the query contained an unsupported query (e.g. WildcardQuery). - * @param fieldType The field type for the query metadata field + * @param extractionResultField The field contains whether query term extraction was successful, partial or + * failed. (For example the query contained an unsupported query (e.g. 
WildcardQuery) + * then query extraction would fail) + * @param fieldType The field type for the query metadata field */ public static void extractQueryTerms(Query query, ParseContext.Document document, String queryTermsFieldField, - String unknownQueryField, FieldType fieldType) { - Set queryTerms; + String extractionResultField, FieldType fieldType) { + Result result; try { - queryTerms = extractQueryTerms(query); + result = extractQueryTerms(query); } catch (UnsupportedQueryException e) { - document.add(new Field(unknownQueryField, new BytesRef(), fieldType)); + document.add(new Field(extractionResultField, EXTRACTION_FAILED, fieldType)); return; } - for (Term term : queryTerms) { + for (Term term : result.terms) { BytesRefBuilder builder = new BytesRefBuilder(); builder.append(new BytesRef(term.field())); builder.append(FIELD_VALUE_SEPARATOR); builder.append(term.bytes()); document.add(new Field(queryTermsFieldField, builder.toBytesRef(), fieldType)); } + if (result.verified) { + document.add(new Field(extractionResultField, EXTRACTION_COMPLETE, fieldType)); + } else { + document.add(new Field(extractionResultField, EXTRACTION_PARTIAL, fieldType)); + } + } + + /** + * Creates a terms query containing all terms from all fields of the specified index reader. + */ + public static Query createQueryTermsQuery(IndexReader indexReader, String queryMetadataField, + Term... optionalTerms) throws IOException { + Objects.requireNonNull(queryMetadataField); + + List extractedTerms = new ArrayList<>(); + Collections.addAll(extractedTerms, optionalTerms); + + Fields fields = MultiFields.getFields(indexReader); + for (String field : fields) { + Terms terms = fields.terms(field); + if (terms == null) { + continue; + } + + BytesRef fieldBr = new BytesRef(field); + TermsEnum tenum = terms.iterator(); + for (BytesRef term = tenum.next(); term != null; term = tenum.next()) { + BytesRefBuilder builder = new BytesRefBuilder(); + builder.append(fieldBr); + builder.append(FIELD_VALUE_SEPARATOR); + builder.append(term); + extractedTerms.add(new Term(queryMetadataField, builder.toBytesRef())); + } + } + return new TermsQuery(extractedTerms); } /** * Extracts all query terms from the provided query and adds it to specified list. - * + *
<p>
* From boolean queries with no should clauses and from phrase queries, only the longest terms are selected, * since those terms are likely to be the rarest. Boolean query's must_not clauses are always ignored. - * + *
<p>
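* For example (illustrative, not from this patch): for the phrase query
* "the quick fox" only "quick" is kept, since the longest of the phrase's
* terms has the best chance of being rare.
* <p>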
* If from part of the query, no query terms can be extracted then term extraction is stopped and * an UnsupportedQueryException is thrown. */ - static Set extractQueryTerms(Query query) { - if (query instanceof MatchNoDocsQuery) { - // no terms to extract as this query matches no docs - return Collections.emptySet(); - } else if (query instanceof TermQuery) { - return Collections.singleton(((TermQuery) query).getTerm()); - } else if (query instanceof TermsQuery) { - Set terms = new HashSet<>(); + static Result extractQueryTerms(Query query) { + Class queryClass = query.getClass(); + if (queryClass.isAnonymousClass()) { + // Sometimes queries have anonymous classes in that case we need the direct super class. + // (for example blended term query) + queryClass = queryClass.getSuperclass(); + } + Function queryProcessor = queryProcessors.get(queryClass); + if (queryProcessor != null) { + return queryProcessor.apply(query); + } else { + throw new UnsupportedQueryException(query); + } + } + + static Function matchNoDocsQuery() { + return (query -> new Result(true, Collections.emptySet())); + } + + static Function constantScoreQuery() { + return query -> { + Query wrappedQuery = ((ConstantScoreQuery) query).getQuery(); + return extractQueryTerms(wrappedQuery); + }; + } + + static Function boostQuery() { + return query -> { + Query wrappedQuery = ((BoostQuery) query).getQuery(); + return extractQueryTerms(wrappedQuery); + }; + } + + static Function termQuery() { + return (query -> { + TermQuery termQuery = (TermQuery) query; + return new Result(true, Collections.singleton(termQuery.getTerm())); + }); + } + + static Function termsQuery() { + return query -> { TermsQuery termsQuery = (TermsQuery) query; + Set terms = new HashSet<>(); PrefixCodedTerms.TermIterator iterator = termsQuery.getTermData().iterator(); for (BytesRef term = iterator.next(); term != null; term = iterator.next()) { terms.add(new Term(iterator.field(), term)); } - return terms; - } else if (query instanceof PhraseQuery) { + return new Result(true, terms); + }; + } + + static Function synonymQuery() { + return query -> { + Set terms = new HashSet<>(((SynonymQuery) query).getTerms()); + return new Result(true, terms); + }; + } + + static Function commonTermsQuery() { + return query -> { + List terms = ((CommonTermsQuery) query).getTerms(); + return new Result(false, new HashSet<>(terms)); + }; + } + + static Function blendedTermQuery() { + return query -> { + List terms = ((BlendedTermQuery) query).getTerms(); + return new Result(true, new HashSet<>(terms)); + }; + } + + static Function phraseQuery() { + return query -> { Term[] terms = ((PhraseQuery) query).getTerms(); if (terms.length == 0) { - return Collections.emptySet(); + return new Result(true, Collections.emptySet()); } // the longest term is likely to be the rarest, @@ -132,19 +259,76 @@ public final class ExtractQueryTermsService { longestTerm = term; } } - return Collections.singleton(longestTerm); - } else if (query instanceof BooleanQuery) { - List clauses = ((BooleanQuery) query).clauses(); - boolean hasRequiredClauses = false; + return new Result(false, Collections.singleton(longestTerm)); + }; + } + + static Function spanTermQuery() { + return query -> { + Term term = ((SpanTermQuery) query).getTerm(); + return new Result(true, Collections.singleton(term)); + }; + } + + static Function spanNearQuery() { + return query -> { + Set bestClauses = null; + SpanNearQuery spanNearQuery = (SpanNearQuery) query; + for (SpanQuery clause : spanNearQuery.getClauses()) { + 
Result temp = extractQueryTerms(clause); + bestClauses = selectTermListWithTheLongestShortestTerm(temp.terms, bestClauses); + } + return new Result(false, bestClauses); + }; + } + + static Function spanOrQuery() { + return query -> { + Set terms = new HashSet<>(); + SpanOrQuery spanOrQuery = (SpanOrQuery) query; + for (SpanQuery clause : spanOrQuery.getClauses()) { + terms.addAll(extractQueryTerms(clause).terms); + } + return new Result(false, terms); + }; + } + + static Function spanNotQuery() { + return query -> { + Result result = extractQueryTerms(((SpanNotQuery) query).getInclude()); + return new Result(false, result.terms); + }; + } + + static Function spanFirstQuery() { + return query -> { + Result result = extractQueryTerms(((SpanFirstQuery) query).getMatch()); + return new Result(false, result.terms); + }; + } + + static Function booleanQuery() { + return query -> { + BooleanQuery bq = (BooleanQuery) query; + List clauses = bq.clauses(); + int minimumShouldMatch = bq.getMinimumNumberShouldMatch(); + int numRequiredClauses = 0; + int numOptionalClauses = 0; + int numProhibitedClauses = 0; for (BooleanClause clause : clauses) { if (clause.isRequired()) { - hasRequiredClauses = true; - break; + numRequiredClauses++; + } + if (clause.isProhibited()) { + numProhibitedClauses++; + } + if (clause.getOccur() == BooleanClause.Occur.SHOULD) { + numOptionalClauses++; } } - if (hasRequiredClauses) { - UnsupportedQueryException uqe = null; + if (numRequiredClauses > 0) { Set bestClause = null; + UnsupportedQueryException uqe = null; for (BooleanClause clause : clauses) { if (clause.isRequired() == false) { // skip must_not clauses, we don't need to remember the things that do *not* match... @@ -153,77 +337,56 @@ public final class ExtractQueryTermsService { continue; } - Set temp; + Result temp; try { temp = extractQueryTerms(clause.getQuery()); } catch (UnsupportedQueryException e) { uqe = e; continue; } - bestClause = selectTermListWithTheLongestShortestTerm(temp, bestClause); + bestClause = selectTermListWithTheLongestShortestTerm(temp.terms, bestClause); } if (bestClause != null) { - return bestClause; + return new Result(false, bestClause); } else { if (uqe != null) { + // we're unable to select the best clause and an exception occurred, so we bail throw uqe; + } else { + // We didn't find a clause and no exception occurred, so this bq only contained MatchNoDocsQueries, + return new Result(true, Collections.emptySet()); } - return Collections.emptySet(); } } else { - Set terms = new HashSet<>(); + List disjunctions = new ArrayList<>(numOptionalClauses); for (BooleanClause clause : clauses) { - if (clause.isProhibited()) { - // we don't need to remember the things that do *not* match... 
- continue; + if (clause.getOccur() == BooleanClause.Occur.SHOULD) { + disjunctions.add(clause.getQuery()); } - terms.addAll(extractQueryTerms(clause.getQuery())); } - return terms; + return handleDisjunction(disjunctions, minimumShouldMatch, numProhibitedClauses > 0); } - } else if (query instanceof ConstantScoreQuery) { - Query wrappedQuery = ((ConstantScoreQuery) query).getQuery(); - return extractQueryTerms(wrappedQuery); - } else if (query instanceof BoostQuery) { - Query wrappedQuery = ((BoostQuery) query).getQuery(); - return extractQueryTerms(wrappedQuery); - } else if (query instanceof CommonTermsQuery) { - List terms = ((CommonTermsQuery) query).getTerms(); - return new HashSet<>(terms); - } else if (query instanceof BlendedTermQuery) { - List terms = ((BlendedTermQuery) query).getTerms(); - return new HashSet<>(terms); - } else if (query instanceof DisjunctionMaxQuery) { + }; + } + + static Function disjunctionMaxQuery() { + return query -> { List disjuncts = ((DisjunctionMaxQuery) query).getDisjuncts(); - Set terms = new HashSet<>(); - for (Query disjunct : disjuncts) { - terms.addAll(extractQueryTerms(disjunct)); + return handleDisjunction(disjuncts, 1, false); + }; + } + + static Result handleDisjunction(List disjunctions, int minimumShouldMatch, boolean otherClauses) { + boolean verified = minimumShouldMatch <= 1 && otherClauses == false; + Set terms = new HashSet<>(); + for (Query disjunct : disjunctions) { + Result subResult = extractQueryTerms(disjunct); + if (subResult.verified == false) { + verified = false; } - return terms; - } else if (query instanceof SpanTermQuery) { - return Collections.singleton(((SpanTermQuery) query).getTerm()); - } else if (query instanceof SpanNearQuery) { - Set bestClause = null; - SpanNearQuery spanNearQuery = (SpanNearQuery) query; - for (SpanQuery clause : spanNearQuery.getClauses()) { - Set temp = extractQueryTerms(clause); - bestClause = selectTermListWithTheLongestShortestTerm(temp, bestClause); - } - return bestClause; - } else if (query instanceof SpanOrQuery) { - Set terms = new HashSet<>(); - SpanOrQuery spanOrQuery = (SpanOrQuery) query; - for (SpanQuery clause : spanOrQuery.getClauses()) { - terms.addAll(extractQueryTerms(clause)); - } - return terms; - } else if (query instanceof SpanFirstQuery) { - return extractQueryTerms(((SpanFirstQuery)query).getMatch()); - } else if (query instanceof SpanNotQuery) { - return extractQueryTerms(((SpanNotQuery) query).getInclude()); - } else { - throw new UnsupportedQueryException(query); + terms.addAll(subResult.terms); } + return new Result(verified, terms); } static Set selectTermListWithTheLongestShortestTerm(Set terms1, Set terms2) { @@ -243,7 +406,7 @@ public final class ExtractQueryTermsService { } } - private static int minTermLength(Set terms) { + static int minTermLength(Set terms) { int min = Integer.MAX_VALUE; for (Term term : terms) { min = Math.min(min, term.bytes().length); @@ -251,40 +414,22 @@ public final class ExtractQueryTermsService { return min; } - /** - * Creates a boolean query with a should clause for each term on all fields of the specified index reader. 
- */ - public static Query createQueryTermsQuery(IndexReader indexReader, String queryMetadataField, - String unknownQueryField) throws IOException { - Objects.requireNonNull(queryMetadataField); - Objects.requireNonNull(unknownQueryField); + static class Result { - List extractedTerms = new ArrayList<>(); - extractedTerms.add(new Term(unknownQueryField)); - Fields fields = MultiFields.getFields(indexReader); - for (String field : fields) { - Terms terms = fields.terms(field); - if (terms == null) { - continue; - } + final Set terms; + final boolean verified; - BytesRef fieldBr = new BytesRef(field); - TermsEnum tenum = terms.iterator(); - for (BytesRef term = tenum.next(); term != null ; term = tenum.next()) { - BytesRefBuilder builder = new BytesRefBuilder(); - builder.append(fieldBr); - builder.append(FIELD_VALUE_SEPARATOR); - builder.append(term); - extractedTerms.add(new Term(queryMetadataField, builder.toBytesRef())); - } + Result(boolean verified, Set terms) { + this.terms = terms; + this.verified = verified; } - return new TermsQuery(extractedTerms); + } /** * Exception indicating that none or some query terms couldn't be extracted from a percolator query. */ - public static class UnsupportedQueryException extends RuntimeException { + static class UnsupportedQueryException extends RuntimeException { private final Query unsupportedQuery; diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateResponse.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateResponse.java index c05c0097c90..afc5b7ab6c7 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateResponse.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateResponse.java @@ -77,7 +77,7 @@ public class MultiPercolateResponse extends ActionResponse implements Iterable
false is returned.
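* (Illustrative usage, not from this patch: {@code for (Item item : response) { if (item.isFailure())
* { handleFailure(item.getFailure()); } } }, where {@code response} and {@code handleFailure} are
* hypothetical caller-side names.)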
*/ public boolean isFailure() { - return throwable != null; + return exception != null; } - public Throwable getFailure() { - return throwable; + public Exception getFailure() { + return exception; } @Override @@ -161,7 +161,7 @@ public class MultiPercolateResponse extends ActionResponse implements Iterable docs = doc.docs(); - int rootDocIndex = docs.size() - 1; - assert rootDocIndex > 0; - for (int i = 0; i < docs.size(); i++) { - ParseContext.Document d = docs.get(i); - MemoryIndex memoryIndex = MemoryIndex.fromDocument(d, analyzer, true, false); - memoryIndices[i] = memoryIndex.createSearcher().getIndexReader(); - } - try { - MultiReader mReader = new MultiReader(memoryIndices, true); - LeafReader slowReader = SlowCompositeReaderWrapper.wrap(mReader); - final IndexSearcher slowSearcher = new IndexSearcher(slowReader) { + RAMDirectory ramDirectory = new RAMDirectory(); + try (IndexWriter indexWriter = new IndexWriter(ramDirectory, new IndexWriterConfig(analyzer))) { + indexWriter.addDocuments(doc.docs()); + indexWriter.commit(); + DirectoryReader directoryReader = DirectoryReader.open(ramDirectory); + assert directoryReader.leaves().size() == 1 : "Expected single leaf, but got [" + directoryReader.leaves().size() + "]"; + final IndexSearcher slowSearcher = new IndexSearcher(directoryReader) { @Override public Weight createNormalizedWeight(Query query, boolean needsScores) throws IOException { @@ -542,7 +541,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING = + public static final XContentType QUERY_BUILDER_CONTENT_TYPE = XContentType.SMILE; + public static final Setting INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING = Setting.boolSetting("index.percolator.map_unmapped_fields_as_string", false, Setting.Property.IndexScope); public static final String CONTENT_TYPE = "percolator"; private static final PercolatorFieldType FIELD_TYPE = new PercolatorFieldType(); public static final String EXTRACTED_TERMS_FIELD_NAME = "extracted_terms"; - public static final String UNKNOWN_QUERY_FIELD_NAME = "unknown_query"; + public static final String EXTRACTION_RESULT_FIELD_NAME = "extraction_result"; public static final String QUERY_BUILDER_FIELD_NAME = "query_builder_field"; public static class Builder extends FieldMapper.Builder { @@ -75,15 +76,15 @@ public class PercolatorFieldMapper extends FieldMapper { context.path().add(name()); KeywordFieldMapper extractedTermsField = createExtractQueryFieldBuilder(EXTRACTED_TERMS_FIELD_NAME, context); ((PercolatorFieldType) fieldType).queryTermsField = extractedTermsField.fieldType(); - KeywordFieldMapper unknownQueryField = createExtractQueryFieldBuilder(UNKNOWN_QUERY_FIELD_NAME, context); - ((PercolatorFieldType) fieldType).unknownQueryField = unknownQueryField.fieldType(); + KeywordFieldMapper extractionResultField = createExtractQueryFieldBuilder(EXTRACTION_RESULT_FIELD_NAME, context); + ((PercolatorFieldType) fieldType).extractionResultField = extractionResultField.fieldType(); BinaryFieldMapper queryBuilderField = createQueryBuilderFieldBuilder(context); ((PercolatorFieldType) fieldType).queryBuilderField = queryBuilderField.fieldType(); context.path().remove(); setupFieldType(context); return new PercolatorFieldMapper(name(), fieldType, defaultFieldType, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo, queryShardContext, extractedTermsField, - unknownQueryField, queryBuilderField); + extractionResultField, queryBuilderField); } static KeywordFieldMapper 
createExtractQueryFieldBuilder(String name, BuilderContext context) { @@ -102,6 +103,7 @@ public class PercolatorFieldMapper extends FieldMapper { builder.fieldType().setDocValuesType(DocValuesType.BINARY); return builder.build(context); } + } public static class TypeParser implements FieldMapper.TypeParser { @@ -115,7 +117,7 @@ public class PercolatorFieldMapper extends FieldMapper { public static class PercolatorFieldType extends MappedFieldType { private MappedFieldType queryTermsField; - private MappedFieldType unknownQueryField; + private MappedFieldType extractionResultField; private MappedFieldType queryBuilderField; public PercolatorFieldType() { @@ -127,7 +129,7 @@ public class PercolatorFieldMapper extends FieldMapper { public PercolatorFieldType(PercolatorFieldType ref) { super(ref); queryTermsField = ref.queryTermsField; - unknownQueryField = ref.unknownQueryField; + extractionResultField = ref.extractionResultField; queryBuilderField = ref.queryBuilderField; } @@ -135,8 +137,8 @@ public class PercolatorFieldMapper extends FieldMapper { return queryTermsField.name(); } - public String getUnknownQueryFieldName() { - return unknownQueryField.name(); + public String getExtractionResultFieldName() { + return extractionResultField.name(); } public String getQueryBuilderFieldName() { @@ -162,17 +164,17 @@ public class PercolatorFieldMapper extends FieldMapper { private final boolean mapUnmappedFieldAsString; private final QueryShardContext queryShardContext; private KeywordFieldMapper queryTermsField; - private KeywordFieldMapper unknownQueryField; + private KeywordFieldMapper extractionResultField; private BinaryFieldMapper queryBuilderField; public PercolatorFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo, QueryShardContext queryShardContext, - KeywordFieldMapper queryTermsField, KeywordFieldMapper unknownQueryField, + KeywordFieldMapper queryTermsField, KeywordFieldMapper extractionResultField, BinaryFieldMapper queryBuilderField) { super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); this.queryShardContext = queryShardContext; this.queryTermsField = queryTermsField; - this.unknownQueryField = unknownQueryField; + this.extractionResultField = extractionResultField; this.queryBuilderField = queryBuilderField; this.mapUnmappedFieldAsString = INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING.get(indexSettings); } @@ -181,18 +183,18 @@ public class PercolatorFieldMapper extends FieldMapper { public FieldMapper updateFieldType(Map fullNameToFieldType) { PercolatorFieldMapper updated = (PercolatorFieldMapper) super.updateFieldType(fullNameToFieldType); KeywordFieldMapper queryTermsUpdated = (KeywordFieldMapper) queryTermsField.updateFieldType(fullNameToFieldType); - KeywordFieldMapper unknownQueryUpdated = (KeywordFieldMapper) unknownQueryField.updateFieldType(fullNameToFieldType); + KeywordFieldMapper extractionResultUpdated = (KeywordFieldMapper) extractionResultField.updateFieldType(fullNameToFieldType); BinaryFieldMapper queryBuilderUpdated = (BinaryFieldMapper) queryBuilderField.updateFieldType(fullNameToFieldType); - if (updated == this || queryTermsUpdated == queryTermsField || unknownQueryUpdated == unknownQueryField - || queryBuilderUpdated == queryBuilderField) { + if (updated == this && queryTermsUpdated == queryTermsField && extractionResultUpdated == extractionResultField + && queryBuilderUpdated == queryBuilderField) { return this; } if 
(updated == this) { updated = (PercolatorFieldMapper) updated.clone(); } updated.queryTermsField = queryTermsUpdated; - updated.unknownQueryField = unknownQueryUpdated; + updated.extractionResultField = extractionResultUpdated; updated.queryBuilderField = queryBuilderUpdated; return updated; } @@ -215,12 +217,12 @@ public class PercolatorFieldMapper extends FieldMapper { try (XContentBuilder builder = XContentFactory.contentBuilder(QUERY_BUILDER_CONTENT_TYPE)) { queryBuilder.toXContent(builder, new MapParams(Collections.emptyMap())); builder.flush(); - byte[] queryBuilderAsBytes = builder.bytes().toBytes(); + byte[] queryBuilderAsBytes = BytesReference.toBytes(builder.bytes()); context.doc().add(new Field(queryBuilderField.name(), queryBuilderAsBytes, queryBuilderField.fieldType())); } Query query = toQuery(queryShardContext, mapUnmappedFieldAsString, queryBuilder); - ExtractQueryTermsService.extractQueryTerms(query, context.doc(), queryTermsField.name(), unknownQueryField.name(), + ExtractQueryTermsService.extractQueryTerms(query, context.doc(), queryTermsField.name(), extractionResultField.name(), queryTermsField.fieldType()); return null; } @@ -258,7 +260,7 @@ public class PercolatorFieldMapper extends FieldMapper { @Override public Iterator iterator() { - return Arrays.asList(queryTermsField, unknownQueryField, queryBuilderField).iterator(); + return Arrays.asList(queryTermsField, extractionResultField, queryBuilderField).iterator(); } @Override diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorPlugin.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorPlugin.java index 23251ca20d0..4359568b3f6 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorPlugin.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorPlugin.java @@ -19,45 +19,39 @@ package org.elasticsearch.percolator; -import org.elasticsearch.action.ActionModule; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.indices.IndicesModule; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestHandler; import org.elasticsearch.search.SearchModule; import java.util.Arrays; +import java.util.Collections; import java.util.List; +import java.util.Map; -public class PercolatorPlugin extends Plugin { +public class PercolatorPlugin extends Plugin implements MapperPlugin, ActionPlugin { - public static final String NAME = "percolator"; - - private final boolean transportClientMode; private final Settings settings; public PercolatorPlugin(Settings settings) { - this.transportClientMode = transportClientMode(settings); this.settings = settings; } - public void onModule(ActionModule module) { - module.registerAction(PercolateAction.INSTANCE, TransportPercolateAction.class); - module.registerAction(MultiPercolateAction.INSTANCE, TransportMultiPercolateAction.class); + @Override + public List, ? 
extends ActionResponse>> getActions() { + return Arrays.asList(new ActionHandler<>(PercolateAction.INSTANCE, TransportPercolateAction.class), + new ActionHandler<>(MultiPercolateAction.INSTANCE, TransportMultiPercolateAction.class)); } - public void onModule(NetworkModule module) { - if (transportClientMode == false) { - module.registerRestHandler(RestPercolateAction.class); - module.registerRestHandler(RestMultiPercolateAction.class); - } - } - - public void onModule(IndicesModule module) { - module.registerMapper(PercolatorFieldMapper.CONTENT_TYPE, new PercolatorFieldMapper.TypeParser()); + @Override + public List> getRestHandlers() { + return Arrays.asList(RestPercolateAction.class, RestMultiPercolateAction.class); } public void onModule(SearchModule module) { @@ -67,10 +61,12 @@ public class PercolatorPlugin extends Plugin { @Override public List> getSettings() { - return Arrays.asList(PercolatorFieldMapper.INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING); + return Collections.singletonList(PercolatorFieldMapper.INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING); } - static boolean transportClientMode(Settings settings) { - return TransportClient.CLIENT_TYPE.equals(settings.get(Client.CLIENT_TYPE_SETTING_S.getKey())); + @Override + public Map getMappers() { + return Collections.singletonMap(PercolatorFieldMapper.CONTENT_TYPE, new PercolatorFieldMapper.TypeParser()); } + } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/RestMultiPercolateAction.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/RestMultiPercolateAction.java index a2902a9a7c2..5e3a6f90756 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/RestMultiPercolateAction.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/RestMultiPercolateAction.java @@ -19,7 +19,7 @@ package org.elasticsearch.percolator; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.client.Client; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -36,13 +36,10 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestMultiPercolateAction extends BaseRestHandler { private final boolean allowExplicitIndex; - private final TransportMultiPercolateAction action; @Inject - public RestMultiPercolateAction(Settings settings, RestController controller, Client client, - TransportMultiPercolateAction action) { - super(settings, client); - this.action = action; + public RestMultiPercolateAction(Settings settings, RestController controller) { + super(settings); controller.registerHandler(POST, "/_mpercolate", this); controller.registerHandler(POST, "/{index}/_mpercolate", this); controller.registerHandler(POST, "/{index}/{type}/_mpercolate", this); @@ -55,13 +52,14 @@ public class RestMultiPercolateAction extends BaseRestHandler { } @Override - public void handleRequest(final RestRequest restRequest, final RestChannel restChannel, final Client client) throws Exception { + public void handleRequest(final RestRequest restRequest, final RestChannel restChannel, final NodeClient client) throws Exception { MultiPercolateRequest multiPercolateRequest = new MultiPercolateRequest(); multiPercolateRequest.indicesOptions(IndicesOptions.fromRequest(restRequest, multiPercolateRequest.indicesOptions())); multiPercolateRequest.indices(Strings.splitStringByCommaToArray(restRequest.param("index"))); 
multiPercolateRequest.documentType(restRequest.param("type")); multiPercolateRequest.add(RestActions.getRestContent(restRequest), allowExplicitIndex); - action.execute(multiPercolateRequest, new RestToXContentListener(restChannel)); + client.execute(MultiPercolateAction.INSTANCE, multiPercolateRequest, + new RestToXContentListener(restChannel)); } } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/RestPercolateAction.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/RestPercolateAction.java index b752cc55f6c..bdbe4921f09 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/RestPercolateAction.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/RestPercolateAction.java @@ -20,7 +20,7 @@ package org.elasticsearch.percolator; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.client.Client; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -36,30 +36,26 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestPercolateAction extends BaseRestHandler { - - private final TransportPercolateAction action; - @Inject - public RestPercolateAction(Settings settings, RestController controller, Client client, TransportPercolateAction action) { - super(settings, client); - this.action = action; + public RestPercolateAction(Settings settings, RestController controller) { + super(settings); controller.registerHandler(GET, "/{index}/{type}/_percolate", this); controller.registerHandler(POST, "/{index}/{type}/_percolate", this); - RestPercolateExistingDocHandler existingDocHandler = new RestPercolateExistingDocHandler(settings, controller, client); + RestPercolateExistingDocHandler existingDocHandler = new RestPercolateExistingDocHandler(settings); controller.registerHandler(GET, "/{index}/{type}/{id}/_percolate", existingDocHandler); controller.registerHandler(POST, "/{index}/{type}/{id}/_percolate", existingDocHandler); - RestCountPercolateDocHandler countHandler = new RestCountPercolateDocHandler(settings, controller, client); + RestCountPercolateDocHandler countHandler = new RestCountPercolateDocHandler(settings); controller.registerHandler(GET, "/{index}/{type}/_percolate/count", countHandler); controller.registerHandler(POST, "/{index}/{type}/_percolate/count", countHandler); - RestCountPercolateExistingDocHandler countExistingDocHandler = new RestCountPercolateExistingDocHandler(settings, controller, client); + RestCountPercolateExistingDocHandler countExistingDocHandler = new RestCountPercolateExistingDocHandler(settings); controller.registerHandler(GET, "/{index}/{type}/{id}/_percolate/count", countExistingDocHandler); controller.registerHandler(POST, "/{index}/{type}/{id}/_percolate/count", countExistingDocHandler); } - void parseDocPercolate(PercolateRequest percolateRequest, RestRequest restRequest, RestChannel restChannel, final Client client) { + void parseDocPercolate(PercolateRequest percolateRequest, RestRequest restRequest, RestChannel restChannel, NodeClient client) { percolateRequest.indices(Strings.splitStringByCommaToArray(restRequest.param("index"))); percolateRequest.documentType(restRequest.param("type")); percolateRequest.routing(restRequest.param("routing")); @@ -67,10 +63,11 @@ public class 
RestPercolateAction extends BaseRestHandler { percolateRequest.source(RestActions.getRestContent(restRequest)); percolateRequest.indicesOptions(IndicesOptions.fromRequest(restRequest, percolateRequest.indicesOptions())); - executePercolate(percolateRequest, restChannel); + executePercolate(client, percolateRequest, restChannel); } - void parseExistingDocPercolate(PercolateRequest percolateRequest, RestRequest restRequest, RestChannel restChannel, final Client client) { + void parseExistingDocPercolate(PercolateRequest percolateRequest, RestRequest restRequest, RestChannel restChannel, + NodeClient client) { String index = restRequest.param("index"); String type = restRequest.param("type"); percolateRequest.indices(Strings.splitStringByCommaToArray(restRequest.param("percolate_index", index))); @@ -91,27 +88,27 @@ public class RestPercolateAction extends BaseRestHandler { percolateRequest.source(RestActions.getRestContent(restRequest)); percolateRequest.indicesOptions(IndicesOptions.fromRequest(restRequest, percolateRequest.indicesOptions())); - executePercolate(percolateRequest, restChannel); + executePercolate(client, percolateRequest, restChannel); } - void executePercolate(final PercolateRequest percolateRequest, final RestChannel restChannel) { - action.execute(percolateRequest, new RestToXContentListener<>(restChannel)); + void executePercolate(final NodeClient client, final PercolateRequest percolateRequest, final RestChannel restChannel) { + client.execute(PercolateAction.INSTANCE, percolateRequest, new RestToXContentListener<>(restChannel)); } @Override - public void handleRequest(RestRequest restRequest, RestChannel restChannel, final Client client) { + public void handleRequest(RestRequest restRequest, RestChannel restChannel, final NodeClient client) { PercolateRequest percolateRequest = new PercolateRequest(); parseDocPercolate(percolateRequest, restRequest, restChannel, client); } final class RestCountPercolateDocHandler extends BaseRestHandler { - private RestCountPercolateDocHandler(Settings settings, final RestController controller, Client client) { - super(settings, client); + private RestCountPercolateDocHandler(Settings settings) { + super(settings); } @Override - public void handleRequest(RestRequest restRequest, RestChannel restChannel, final Client client) { + public void handleRequest(RestRequest restRequest, RestChannel restChannel, final NodeClient client) { PercolateRequest percolateRequest = new PercolateRequest(); percolateRequest.onlyCount(true); parseDocPercolate(percolateRequest, restRequest, restChannel, client); @@ -120,12 +117,12 @@ public class RestPercolateAction extends BaseRestHandler { final class RestPercolateExistingDocHandler extends BaseRestHandler { - protected RestPercolateExistingDocHandler(Settings settings, final RestController controller, Client client) { - super(settings, client); + protected RestPercolateExistingDocHandler(Settings settings) { + super(settings); } @Override - public void handleRequest(RestRequest restRequest, RestChannel restChannel, final Client client) { + public void handleRequest(RestRequest restRequest, RestChannel restChannel, final NodeClient client) { PercolateRequest percolateRequest = new PercolateRequest(); parseExistingDocPercolate(percolateRequest, restRequest, restChannel, client); } @@ -133,12 +130,12 @@ public class RestPercolateAction extends BaseRestHandler { final class RestCountPercolateExistingDocHandler extends BaseRestHandler { - protected RestCountPercolateExistingDocHandler(Settings settings, final 
RestController controller, Client client) { - super(settings, client); + protected RestCountPercolateExistingDocHandler(Settings settings) { + super(settings); } @Override - public void handleRequest(RestRequest restRequest, RestChannel restChannel, final Client client) { + public void handleRequest(RestRequest restRequest, RestChannel restChannel, final NodeClient client) { PercolateRequest percolateRequest = new PercolateRequest(); percolateRequest.onlyCount(true); parseExistingDocPercolate(percolateRequest, restRequest, restChannel, client); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/TransportMultiPercolateAction.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/TransportMultiPercolateAction.java index 2b9f77ab82a..9968035ec85 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/TransportMultiPercolateAction.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/TransportMultiPercolateAction.java @@ -107,7 +107,7 @@ public class TransportMultiPercolateAction extends HandledTransportAction field1.binaryValue().compareTo(field2.binaryValue())); - assertThat(document.getFields().size(), equalTo(2)); - assertThat(document.getFields().get(0).name(), equalTo(QUERY_TERMS_FIELD)); - assertThat(document.getFields().get(0).binaryValue().utf8ToString(), equalTo("field1\u0000term1")); - assertThat(document.getFields().get(1).name(), equalTo(QUERY_TERMS_FIELD)); - assertThat(document.getFields().get(1).binaryValue().utf8ToString(), equalTo("field2\u0000term2")); + extractQueryTerms(bq.build(), document, QUERY_TERMS_FIELD, EXTRACTION_RESULT_FIELD, QUERY_TERMS_FIELD_TYPE); + assertThat(document.getField(EXTRACTION_RESULT_FIELD).stringValue(), equalTo(EXTRACTION_COMPLETE)); + List fields = new ArrayList<>(Arrays.asList(document.getFields(QUERY_TERMS_FIELD))); + Collections.sort(fields, (field1, field2) -> field1.binaryValue().compareTo(field2.binaryValue())); + assertThat(fields.size(), equalTo(2)); + assertThat(fields.get(0).name(), equalTo(QUERY_TERMS_FIELD)); + assertThat(fields.get(0).binaryValue().utf8ToString(), equalTo("field1\u0000term1")); + assertThat(fields.get(1).name(), equalTo(QUERY_TERMS_FIELD)); + assertThat(fields.get(1).binaryValue().utf8ToString(), equalTo("field2\u0000term2")); } public void testExtractQueryMetadata_unsupported() { - BooleanQuery.Builder bq = new BooleanQuery.Builder(); - TermQuery termQuery1 = new TermQuery(new Term("field1", "term1")); - bq.add(termQuery1, BooleanClause.Occur.SHOULD); - TermQuery termQuery2 = new TermQuery(new Term("field2", "term2")); - bq.add(termQuery2, BooleanClause.Occur.SHOULD); - TermRangeQuery query = new TermRangeQuery("field1", new BytesRef("a"), new BytesRef("z"), true, true); ParseContext.Document document = new ParseContext.Document(); - extractQueryTerms(query, document, QUERY_TERMS_FIELD, UNKNOWN_QUERY_FIELD, QUERY_TERMS_FIELD_TYPE); + extractQueryTerms(query, document, QUERY_TERMS_FIELD, EXTRACTION_RESULT_FIELD, QUERY_TERMS_FIELD_TYPE); assertThat(document.getFields().size(), equalTo(1)); - assertThat(document.getFields().get(0).name(), equalTo(UNKNOWN_QUERY_FIELD)); - assertThat(document.getFields().get(0).binaryValue().utf8ToString(), equalTo("")); + assertThat(document.getField(EXTRACTION_RESULT_FIELD).stringValue(), equalTo(EXTRACTION_FAILED)); + } + + public void testExtractQueryMetadata_notVerified() { + PhraseQuery phraseQuery = new PhraseQuery("field", "term"); + + ParseContext.Document document = new ParseContext.Document(); + 
extractQueryTerms(phraseQuery, document, QUERY_TERMS_FIELD, EXTRACTION_RESULT_FIELD, QUERY_TERMS_FIELD_TYPE); + assertThat(document.getFields().size(), equalTo(2)); + assertThat(document.getFields().get(0).name(), equalTo(QUERY_TERMS_FIELD)); + assertThat(document.getFields().get(0).binaryValue().utf8ToString(), equalTo("field\u0000term")); + assertThat(document.getField(EXTRACTION_RESULT_FIELD).stringValue(), equalTo(EXTRACTION_PARTIAL)); } public void testExtractQueryMetadata_termQuery() { TermQuery termQuery = new TermQuery(new Term("_field", "_term")); - List terms = new ArrayList<>(extractQueryTerms(termQuery)); + Result result = extractQueryTerms(termQuery); + assertThat(result.verified, is(true)); + List terms = new ArrayList<>(result.terms); assertThat(terms.size(), equalTo(1)); assertThat(terms.get(0).field(), equalTo(termQuery.getTerm().field())); assertThat(terms.get(0).bytes(), equalTo(termQuery.getTerm().bytes())); @@ -115,7 +131,9 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { public void testExtractQueryMetadata_termsQuery() { TermsQuery termsQuery = new TermsQuery("_field", new BytesRef("_term1"), new BytesRef("_term2")); - List terms = new ArrayList<>(extractQueryTerms(termsQuery)); + Result result = extractQueryTerms(termsQuery); + assertThat(result.verified, is(true)); + List terms = new ArrayList<>(result.terms); Collections.sort(terms); assertThat(terms.size(), equalTo(2)); assertThat(terms.get(0).field(), equalTo("_field")); @@ -125,7 +143,9 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { // test with different fields termsQuery = new TermsQuery(new Term("_field1", "_term1"), new Term("_field2", "_term2")); - terms = new ArrayList<>(extractQueryTerms(termsQuery)); + result = extractQueryTerms(termsQuery); + assertThat(result.verified, is(true)); + terms = new ArrayList<>(result.terms); Collections.sort(terms); assertThat(terms.size(), equalTo(2)); assertThat(terms.get(0).field(), equalTo("_field1")); @@ -136,7 +156,9 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { public void testExtractQueryMetadata_phraseQuery() { PhraseQuery phraseQuery = new PhraseQuery("_field", "_term1", "term2"); - List terms = new ArrayList<>(extractQueryTerms(phraseQuery)); + Result result = extractQueryTerms(phraseQuery); + assertThat(result.verified, is(false)); + List terms = new ArrayList<>(result.terms); assertThat(terms.size(), equalTo(1)); assertThat(terms.get(0).field(), equalTo(phraseQuery.getTerms()[0].field())); assertThat(terms.get(0).bytes(), equalTo(phraseQuery.getTerms()[0].bytes())); @@ -157,7 +179,9 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { builder.add(subBuilder.build(), BooleanClause.Occur.SHOULD); BooleanQuery booleanQuery = builder.build(); - List terms = new ArrayList<>(extractQueryTerms(booleanQuery)); + Result result = extractQueryTerms(booleanQuery); + assertThat("Should clause with phrase query isn't verified, so entire query can't be verified", result.verified, is(false)); + List terms = new ArrayList<>(result.terms); Collections.sort(terms); assertThat(terms.size(), equalTo(3)); assertThat(terms.get(0).field(), equalTo(termQuery1.getTerm().field())); @@ -183,7 +207,9 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { builder.add(subBuilder.build(), BooleanClause.Occur.SHOULD); BooleanQuery booleanQuery = builder.build(); - List terms = new ArrayList<>(extractQueryTerms(booleanQuery)); + Result result = extractQueryTerms(booleanQuery); + assertThat(result.verified, 
is(true)); + List terms = new ArrayList<>(result.terms); Collections.sort(terms); assertThat(terms.size(), equalTo(4)); assertThat(terms.get(0).field(), equalTo(termQuery1.getTerm().field())); @@ -204,16 +230,74 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { builder.add(phraseQuery, BooleanClause.Occur.SHOULD); BooleanQuery booleanQuery = builder.build(); - List terms = new ArrayList<>(extractQueryTerms(booleanQuery)); + Result result = extractQueryTerms(booleanQuery); + assertThat(result.verified, is(false)); + List terms = new ArrayList<>(result.terms); assertThat(terms.size(), equalTo(1)); assertThat(terms.get(0).field(), equalTo(phraseQuery.getTerms()[0].field())); assertThat(terms.get(0).bytes(), equalTo(phraseQuery.getTerms()[0].bytes())); } + public void testExactMatch_booleanQuery() { + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + TermQuery termQuery1 = new TermQuery(new Term("_field", "_term1")); + builder.add(termQuery1, BooleanClause.Occur.SHOULD); + TermQuery termQuery2 = new TermQuery(new Term("_field", "_term2")); + builder.add(termQuery2, BooleanClause.Occur.SHOULD); + Result result = extractQueryTerms(builder.build()); + assertThat("All clauses are exact, so candidate matches are verified", result.verified, is(true)); + + builder = new BooleanQuery.Builder(); + builder.add(termQuery1, BooleanClause.Occur.SHOULD); + PhraseQuery phraseQuery1 = new PhraseQuery("_field", "_term1", "_term2"); + builder.add(phraseQuery1, BooleanClause.Occur.SHOULD); + result = extractQueryTerms(builder.build()); + assertThat("Clause isn't exact, so candidate matches are not verified", result.verified, is(false)); + + builder = new BooleanQuery.Builder(); + builder.add(phraseQuery1, BooleanClause.Occur.SHOULD); + PhraseQuery phraseQuery2 = new PhraseQuery("_field", "_term3", "_term4"); + builder.add(phraseQuery2, BooleanClause.Occur.SHOULD); + result = extractQueryTerms(builder.build()); + assertThat("No clause is exact, so candidate matches are not verified", result.verified, is(false)); + + builder = new BooleanQuery.Builder(); + builder.add(termQuery1, BooleanClause.Occur.MUST_NOT); + builder.add(termQuery2, BooleanClause.Occur.SHOULD); + result = extractQueryTerms(builder.build()); + assertThat("There is a must_not clause, so candidate matches are not verified", result.verified, is(false)); + + builder = new BooleanQuery.Builder(); + builder.setMinimumNumberShouldMatch(randomIntBetween(2, 32)); + builder.add(termQuery1, BooleanClause.Occur.SHOULD); + builder.add(termQuery2, BooleanClause.Occur.SHOULD); + result = extractQueryTerms(builder.build()); + assertThat("Minimum match is >= 1, so candidate matches are not verified", result.verified, is(false)); + + builder = new BooleanQuery.Builder(); + builder.add(termQuery1, randomBoolean() ? BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER); + result = extractQueryTerms(builder.build()); + assertThat("Single required clause, but candidate matches are not verified", result.verified, is(false)); + + builder = new BooleanQuery.Builder(); + builder.add(termQuery1, randomBoolean() ? BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER); + builder.add(termQuery2, randomBoolean() ? BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER); + result = extractQueryTerms(builder.build()); + assertThat("Two or more required clauses, so candidate matches are not verified", result.verified, is(false)); + + builder = new BooleanQuery.Builder(); + builder.add(termQuery1, randomBoolean() ? 
BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER); + builder.add(termQuery2, BooleanClause.Occur.MUST_NOT); + result = extractQueryTerms(builder.build()); + assertThat("Required and prohibited clauses, so candidate matches are not verified", result.verified, is(false)); + } + public void testExtractQueryMetadata_constantScoreQuery() { TermQuery termQuery1 = new TermQuery(new Term("_field", "_term")); ConstantScoreQuery constantScoreQuery = new ConstantScoreQuery(termQuery1); - List terms = new ArrayList<>(extractQueryTerms(constantScoreQuery)); + Result result = extractQueryTerms(constantScoreQuery); + assertThat(result.verified, is(true)); + List terms = new ArrayList<>(result.terms); assertThat(terms.size(), equalTo(1)); assertThat(terms.get(0).field(), equalTo(termQuery1.getTerm().field())); assertThat(terms.get(0).bytes(), equalTo(termQuery1.getTerm().bytes())); @@ -222,7 +306,9 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { public void testExtractQueryMetadata_boostQuery() { TermQuery termQuery1 = new TermQuery(new Term("_field", "_term")); BoostQuery constantScoreQuery = new BoostQuery(termQuery1, 1f); - List terms = new ArrayList<>(extractQueryTerms(constantScoreQuery)); + Result result = extractQueryTerms(constantScoreQuery); + assertThat(result.verified, is(true)); + List terms = new ArrayList<>(result.terms); assertThat(terms.size(), equalTo(1)); assertThat(terms.get(0).field(), equalTo(termQuery1.getTerm().field())); assertThat(terms.get(0).bytes(), equalTo(termQuery1.getTerm().bytes())); @@ -232,7 +318,9 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { CommonTermsQuery commonTermsQuery = new CommonTermsQuery(BooleanClause.Occur.SHOULD, BooleanClause.Occur.SHOULD, 100); commonTermsQuery.add(new Term("_field", "_term1")); commonTermsQuery.add(new Term("_field", "_term2")); - List terms = new ArrayList<>(extractQueryTerms(commonTermsQuery)); + Result result = extractQueryTerms(commonTermsQuery); + assertThat(result.verified, is(false)); + List terms = new ArrayList<>(result.terms); Collections.sort(terms); assertThat(terms.size(), equalTo(2)); assertThat(terms.get(0).field(), equalTo("_field")); @@ -242,15 +330,17 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { } public void testExtractQueryMetadata_blendedTermQuery() { - Term[] terms = new Term[]{new Term("_field", "_term1"), new Term("_field", "_term2")}; - BlendedTermQuery commonTermsQuery = BlendedTermQuery.booleanBlendedQuery(terms, false); - List result = new ArrayList<>(extractQueryTerms(commonTermsQuery)); - Collections.sort(result); - assertThat(result.size(), equalTo(2)); - assertThat(result.get(0).field(), equalTo("_field")); - assertThat(result.get(0).text(), equalTo("_term1")); - assertThat(result.get(1).field(), equalTo("_field")); - assertThat(result.get(1).text(), equalTo("_term2")); + Term[] termsArr = new Term[]{new Term("_field", "_term1"), new Term("_field", "_term2")}; + BlendedTermQuery commonTermsQuery = BlendedTermQuery.booleanBlendedQuery(termsArr, false); + Result result = extractQueryTerms(commonTermsQuery); + assertThat(result.verified, is(true)); + List terms = new ArrayList<>(result.terms); + Collections.sort(terms); + assertThat(terms.size(), equalTo(2)); + assertThat(terms.get(0).field(), equalTo("_field")); + assertThat(terms.get(0).text(), equalTo("_term1")); + assertThat(terms.get(1).field(), equalTo("_field")); + assertThat(terms.get(1).text(), equalTo("_term2")); } public void testExtractQueryMetadata_spanTermQuery() { @@ -266,8 
+356,9 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { // 4) FieldMaskingSpanQuery is a tricky query so we shouldn't optimize this SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); - Set terms = extractQueryTerms(spanTermQuery1); - assertTermsEqual(terms, spanTermQuery1.getTerm()); + Result result = extractQueryTerms(spanTermQuery1); + assertThat(result.verified, is(true)); + assertTermsEqual(result.terms, spanTermQuery1.getTerm()); } public void testExtractQueryMetadata_spanNearQuery() { @@ -275,48 +366,109 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { SpanTermQuery spanTermQuery2 = new SpanTermQuery(new Term("_field", "_very_long_term")); SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("_field", true) .addClause(spanTermQuery1).addClause(spanTermQuery2).build(); - Set terms = extractQueryTerms(spanNearQuery); - assertTermsEqual(terms, spanTermQuery2.getTerm()); + + Result result = extractQueryTerms(spanNearQuery); + assertThat(result.verified, is(false)); + assertTermsEqual(result.terms, spanTermQuery2.getTerm()); } public void testExtractQueryMetadata_spanOrQuery() { SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); SpanTermQuery spanTermQuery2 = new SpanTermQuery(new Term("_field", "_very_long_term")); SpanOrQuery spanOrQuery = new SpanOrQuery(spanTermQuery1, spanTermQuery2); - Set terms = extractQueryTerms(spanOrQuery); - assertTermsEqual(terms, spanTermQuery1.getTerm(), spanTermQuery2.getTerm()); + Result result = extractQueryTerms(spanOrQuery); + assertThat(result.verified, is(false)); + assertTermsEqual(result.terms, spanTermQuery1.getTerm(), spanTermQuery2.getTerm()); } public void testExtractQueryMetadata_spanFirstQuery() { SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); SpanFirstQuery spanFirstQuery = new SpanFirstQuery(spanTermQuery1, 20); - Set terms = extractQueryTerms(spanFirstQuery); - assertTermsEqual(terms, spanTermQuery1.getTerm()); + Result result = extractQueryTerms(spanFirstQuery); + assertThat(result.verified, is(false)); + assertTermsEqual(result.terms, spanTermQuery1.getTerm()); } public void testExtractQueryMetadata_spanNotQuery() { SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); SpanTermQuery spanTermQuery2 = new SpanTermQuery(new Term("_field", "_very_long_term")); SpanNotQuery spanNotQuery = new SpanNotQuery(spanTermQuery1, spanTermQuery2); - Set terms = extractQueryTerms(spanNotQuery); - assertTermsEqual(terms, spanTermQuery1.getTerm()); + Result result = extractQueryTerms(spanNotQuery); + assertThat(result.verified, is(false)); + assertTermsEqual(result.terms, spanTermQuery1.getTerm()); } public void testExtractQueryMetadata_matchNoDocsQuery() { - Set terms = extractQueryTerms(new MatchNoDocsQuery("sometimes there is no reason at all")); - assertEquals(0, terms.size()); + Result result = extractQueryTerms(new MatchNoDocsQuery("sometimes there is no reason at all")); + assertThat(result.verified, is(true)); + assertEquals(0, result.terms.size()); BooleanQuery.Builder bq = new BooleanQuery.Builder(); bq.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.MUST); bq.add(new MatchNoDocsQuery("sometimes there is no reason at all"), BooleanClause.Occur.MUST); - terms = extractQueryTerms(bq.build()); - assertEquals(0, terms.size()); + result = extractQueryTerms(bq.build()); + assertThat(result.verified, is(false)); + assertEquals(0, result.terms.size()); bq = new 
BooleanQuery.Builder(); bq.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.SHOULD); bq.add(new MatchNoDocsQuery("sometimes there is no reason at all"), BooleanClause.Occur.SHOULD); - terms = extractQueryTerms(bq.build()); - assertTermsEqual(terms, new Term("field", "value")); + result = extractQueryTerms(bq.build()); + assertThat(result.verified, is(true)); + assertTermsEqual(result.terms, new Term("field", "value")); + + DisjunctionMaxQuery disjunctionMaxQuery = new DisjunctionMaxQuery( + Arrays.asList(new TermQuery(new Term("field", "value")), new MatchNoDocsQuery("sometimes there is no reason at all")), + 1f + ); + result = extractQueryTerms(disjunctionMaxQuery); + assertThat(result.verified, is(true)); + assertTermsEqual(result.terms, new Term("field", "value")); + } + + public void testExtractQueryMetadata_matchAllDocsQuery() { + expectThrows(UnsupportedQueryException.class, () -> extractQueryTerms(new MatchAllDocsQuery())); + + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + builder.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.MUST); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); + Result result = extractQueryTerms(builder.build()); + assertThat(result.verified, is(false)); + assertTermsEqual(result.terms, new Term("field", "value")); + + builder = new BooleanQuery.Builder(); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); + BooleanQuery bq1 = builder.build(); + expectThrows(UnsupportedQueryException.class, () -> extractQueryTerms(bq1)); + + builder = new BooleanQuery.Builder(); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST_NOT); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); + BooleanQuery bq2 = builder.build(); + expectThrows(UnsupportedQueryException.class, () -> extractQueryTerms(bq2)); + + builder = new BooleanQuery.Builder(); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); + BooleanQuery bq3 = builder.build(); + expectThrows(UnsupportedQueryException.class, () -> extractQueryTerms(bq3)); + + builder = new BooleanQuery.Builder(); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST_NOT); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); + BooleanQuery bq4 = builder.build(); + expectThrows(UnsupportedQueryException.class, () -> extractQueryTerms(bq4)); + + builder = new BooleanQuery.Builder(); + builder.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.SHOULD); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); + BooleanQuery bq5 = builder.build(); + expectThrows(UnsupportedQueryException.class, () -> extractQueryTerms(bq5)); } public void testExtractQueryMetadata_unsupportedQuery() { @@ -343,8 +495,9 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { builder.add(unsupportedQuery, BooleanClause.Occur.MUST); BooleanQuery bq1 = builder.build(); - Set terms = extractQueryTerms(bq1); - assertTermsEqual(terms, termQuery1.getTerm()); + Result result = extractQueryTerms(bq1); + assertThat(result.verified, is(false)); + assertTermsEqual(result.terms, 
termQuery1.getTerm()); TermQuery termQuery2 = new TermQuery(new Term("_field", "_longer_term")); builder = new BooleanQuery.Builder(); @@ -352,8 +505,9 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { builder.add(termQuery2, BooleanClause.Occur.MUST); builder.add(unsupportedQuery, BooleanClause.Occur.MUST); bq1 = builder.build(); - terms = extractQueryTerms(bq1); - assertTermsEqual(terms, termQuery2.getTerm()); + result = extractQueryTerms(bq1); + assertThat(result.verified, is(false)); + assertTermsEqual(result.terms, termQuery2.getTerm()); builder = new BooleanQuery.Builder(); builder.add(unsupportedQuery, BooleanClause.Occur.MUST); @@ -372,7 +526,27 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { Arrays.asList(termQuery1, termQuery2, termQuery3, termQuery4), 0.1f ); - List terms = new ArrayList<>(extractQueryTerms(disjunctionMaxQuery)); + Result result = extractQueryTerms(disjunctionMaxQuery); + assertThat(result.verified, is(true)); + List terms = new ArrayList<>(result.terms); + Collections.sort(terms); + assertThat(terms.size(), equalTo(4)); + assertThat(terms.get(0).field(), equalTo(termQuery1.getTerm().field())); + assertThat(terms.get(0).bytes(), equalTo(termQuery1.getTerm().bytes())); + assertThat(terms.get(1).field(), equalTo(termQuery2.getTerm().field())); + assertThat(terms.get(1).bytes(), equalTo(termQuery2.getTerm().bytes())); + assertThat(terms.get(2).field(), equalTo(termQuery3.getTerm().field())); + assertThat(terms.get(2).bytes(), equalTo(termQuery3.getTerm().bytes())); + assertThat(terms.get(3).field(), equalTo(termQuery4.getTerm().field())); + assertThat(terms.get(3).bytes(), equalTo(termQuery4.getTerm().bytes())); + + disjunctionMaxQuery = new DisjunctionMaxQuery( + Arrays.asList(termQuery1, termQuery2, termQuery3, new PhraseQuery("_field", "_term4")), 0.1f + ); + + result = extractQueryTerms(disjunctionMaxQuery); + assertThat(result.verified, is(false)); + terms = new ArrayList<>(result.terms); Collections.sort(terms); assertThat(terms.size(), equalTo(4)); assertThat(terms.get(0).field(), equalTo(termQuery1.getTerm().field())); @@ -385,6 +559,18 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { assertThat(terms.get(3).bytes(), equalTo(termQuery4.getTerm().bytes())); } + public void testSynonymQuery() { + SynonymQuery query = new SynonymQuery(); + Result result = extractQueryTerms(query); + assertThat(result.verified, is(true)); + assertThat(result.terms.isEmpty(), is(true)); + + query = new SynonymQuery(new Term("_field", "_value1"), new Term("_field", "_value2")); + result = extractQueryTerms(query); + assertThat(result.verified, is(true)); + assertTermsEqual(result.terms, new Term("_field", "_value1"), new Term("_field", "_value2")); + } + public void testCreateQueryMetadataQuery() throws Exception { MemoryIndex memoryIndex = new MemoryIndex(false); memoryIndex.addField("field1", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer()); @@ -394,7 +580,7 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { IndexReader indexReader = memoryIndex.createSearcher().getIndexReader(); TermsQuery query = (TermsQuery) - createQueryTermsQuery(indexReader, QUERY_TERMS_FIELD, UNKNOWN_QUERY_FIELD); + createQueryTermsQuery(indexReader, QUERY_TERMS_FIELD, new Term(EXTRACTION_RESULT_FIELD, EXTRACTION_FAILED)); PrefixCodedTerms terms = query.getTermData(); assertThat(terms.size(), equalTo(15L)); @@ -413,7 +599,7 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { 
assertTermIterator(termIterator, "field2\u0000some", QUERY_TERMS_FIELD); assertTermIterator(termIterator, "field2\u0000text", QUERY_TERMS_FIELD); assertTermIterator(termIterator, "field4\u0000123", QUERY_TERMS_FIELD); - assertTermIterator(termIterator, "", UNKNOWN_QUERY_FIELD); + assertTermIterator(termIterator, EXTRACTION_FAILED, EXTRACTION_RESULT_FIELD); } public void testSelectTermsListWithHighestSumOfTermLength() { diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java index 15d531467d4..0a359376f7b 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java @@ -53,8 +53,8 @@ import static org.hamcrest.Matchers.nullValue; public class MultiPercolatorIT extends ESIntegTestCase { - private final static String INDEX_NAME = "queries"; - private final static String TYPE_NAME = "query"; + private static final String INDEX_NAME = "queries"; + private static final String TYPE_NAME = "query"; @Override protected Collection> nodePlugins() { diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java index 4879badc7d3..c2c2a641a71 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java @@ -29,6 +29,7 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.Term; import org.apache.lucene.index.memory.MemoryIndex; @@ -37,14 +38,21 @@ import org.apache.lucene.queries.CommonTermsQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.ConstantScoreScorer; +import org.apache.lucene.search.ConstantScoreWeight; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.FilterScorer; +import org.apache.lucene.search.FilteredDocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.Weight; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.search.spans.SpanNearQuery; import org.apache.lucene.search.spans.SpanNotQuery; @@ -52,6 +60,7 @@ import org.apache.lucene.search.spans.SpanOrQuery; import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.store.Directory; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.lucene.search.MatchNoDocsQuery; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.Uid; @@ -61,8 +70,11 @@ import org.junit.After; import 
org.junit.Before; import java.io.IOException; +import java.util.ArrayList; import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.function.Function; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.equalTo; @@ -70,9 +82,9 @@ import static org.hamcrest.Matchers.is; public class PercolateQueryTests extends ESTestCase { - public final static String EXTRACTED_TERMS_FIELD_NAME = "extracted_terms"; - public final static String UNKNOWN_QUERY_FIELD_NAME = "unknown_query"; - public static FieldType EXTRACTED_TERMS_FIELD_TYPE = new FieldType(); + public static final String EXTRACTED_TERMS_FIELD_NAME = "extracted_terms"; + public static final String UNKNOWN_QUERY_FIELD_NAME = "unknown_query"; + public static final FieldType EXTRACTED_TERMS_FIELD_TYPE = new FieldType(); static { EXTRACTED_TERMS_FIELD_TYPE.setTokenized(false); @@ -247,34 +259,91 @@ public class PercolateQueryTests extends ESTestCase { } public void testDuel() throws Exception { - int numQueries = scaledRandomIntBetween(32, 256); - for (int i = 0; i < numQueries; i++) { - String id = Integer.toString(i); - Query query; + List> queries = new ArrayList<>(); + queries.add((id) -> new PrefixQuery(new Term("field", id))); + queries.add((id) -> new WildcardQuery(new Term("field", id + "*"))); + queries.add((id) -> new CustomQuery(new Term("field", id))); + queries.add((id) -> new SpanTermQuery(new Term("field", id))); + queries.add((id) -> new TermQuery(new Term("field", id))); + queries.add((id) -> { + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + return builder.build(); + }); + queries.add((id) -> { + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + builder.add(new TermQuery(new Term("field", id)), BooleanClause.Occur.MUST); if (randomBoolean()) { - query = new PrefixQuery(new Term("field", id)); - } else if (randomBoolean()) { - query = new WildcardQuery(new Term("field", id + "*")); - } else if (randomBoolean()) { - query = new CustomQuery(new Term("field", id + "*")); - } else if (randomBoolean()) { - query = new SpanTermQuery(new Term("field", id)); - } else { - query = new TermQuery(new Term("field", id)); + builder.add(new MatchNoDocsQuery("no reason"), BooleanClause.Occur.MUST_NOT); } - addPercolatorQuery(id, query); + if (randomBoolean()) { + builder.add(new CustomQuery(new Term("field", id)), BooleanClause.Occur.MUST); + } + return builder.build(); + }); + queries.add((id) -> { + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + builder.add(new TermQuery(new Term("field", id)), BooleanClause.Occur.SHOULD); + if (randomBoolean()) { + builder.add(new MatchNoDocsQuery("no reason"), BooleanClause.Occur.MUST_NOT); + } + if (randomBoolean()) { + builder.add(new CustomQuery(new Term("field", id)), BooleanClause.Occur.SHOULD); + } + return builder.build(); + }); + queries.add((id) -> { + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); + if (randomBoolean()) { + builder.add(new MatchNoDocsQuery("no reason"), BooleanClause.Occur.MUST_NOT); + } + return builder.build(); + }); + queries.add((id) -> { + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); + if (randomBoolean()) { + builder.add(new MatchNoDocsQuery("no reason"), 
BooleanClause.Occur.MUST_NOT); + } + return builder.build(); + }); + queries.add((id) -> { + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + builder.setMinimumNumberShouldMatch(randomIntBetween(0, 4)); + builder.add(new TermQuery(new Term("field", id)), BooleanClause.Occur.SHOULD); + builder.add(new CustomQuery(new Term("field", id)), BooleanClause.Occur.SHOULD); + return builder.build(); + }); + queries.add((id) -> new MatchAllDocsQuery()); + queries.add((id) -> new MatchNoDocsQuery("no reason at all")); + + int numDocs = randomIntBetween(queries.size(), queries.size() * 3); + for (int i = 0; i < numDocs; i++) { + String id = Integer.toString(i); + addPercolatorQuery(id, queries.get(i % queries.size()).apply(id)); } indexWriter.close(); directoryReader = DirectoryReader.open(directory); IndexSearcher shardSearcher = newSearcher(directoryReader); + // Disable query cache, because ControlQuery cannot be cached... + shardSearcher.setQueryCache(null); - for (int i = 0; i < numQueries; i++) { - MemoryIndex memoryIndex = new MemoryIndex(); + for (int i = 0; i < numDocs; i++) { String id = Integer.toString(i); + MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", id, new WhitespaceAnalyzer()); duelRun(memoryIndex, shardSearcher); } + + MemoryIndex memoryIndex = new MemoryIndex(); + memoryIndex.addField("field", "value", new WhitespaceAnalyzer()); + duelRun(memoryIndex, shardSearcher); + // Empty percolator doc: + memoryIndex = new MemoryIndex(); + duelRun(memoryIndex, shardSearcher); } public void testDuelSpecificQueries() throws Exception { @@ -312,6 +381,8 @@ public class PercolateQueryTests extends ESTestCase { indexWriter.close(); directoryReader = DirectoryReader.open(directory); IndexSearcher shardSearcher = newSearcher(directoryReader); + // Disable query cache, because ControlQuery cannot be cached... + shardSearcher.setQueryCache(null); MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer()); @@ -332,37 +403,37 @@ public class PercolateQueryTests extends ESTestCase { } private void duelRun(MemoryIndex memoryIndex, IndexSearcher shardSearcher) throws IOException { + boolean requireScore = randomBoolean(); IndexSearcher percolateSearcher = memoryIndex.createSearcher(); - PercolateQuery.Builder builder1 = new PercolateQuery.Builder( + PercolateQuery.Builder builder = new PercolateQuery.Builder( "docType", queryStore, new BytesArray("{}"), percolateSearcher ); // enables the optimization that prevents queries from being evaluated that don't match - builder1.extractQueryTermsQuery(EXTRACTED_TERMS_FIELD_NAME, UNKNOWN_QUERY_FIELD_NAME); - TopDocs topDocs1 = shardSearcher.search(builder1.build(), 10); + builder.extractQueryTermsQuery(EXTRACTED_TERMS_FIELD_NAME, UNKNOWN_QUERY_FIELD_NAME); + Query query = requireScore ? 
builder.build() : new ConstantScoreQuery(builder.build()); + TopDocs topDocs = shardSearcher.search(query, 10); - PercolateQuery.Builder builder2 = new PercolateQuery.Builder( - "docType", - queryStore, - new BytesArray("{}"), - percolateSearcher - ); - builder2.setPercolateTypeQuery(new MatchAllDocsQuery()); - TopDocs topDocs2 = shardSearcher.search(builder2.build(), 10); - assertThat(topDocs1.totalHits, equalTo(topDocs2.totalHits)); - assertThat(topDocs1.scoreDocs.length, equalTo(topDocs2.scoreDocs.length)); - for (int j = 0; j < topDocs1.scoreDocs.length; j++) { - assertThat(topDocs1.scoreDocs[j].doc, equalTo(topDocs2.scoreDocs[j].doc)); - assertThat(topDocs1.scoreDocs[j].score, equalTo(topDocs2.scoreDocs[j].score)); - Explanation explain1 = shardSearcher.explain(builder1.build(), topDocs1.scoreDocs[j].doc); - Explanation explain2 = shardSearcher.explain(builder2.build(), topDocs2.scoreDocs[j].doc); - assertThat(explain1.toHtml(), equalTo(explain2.toHtml())); + Query controlQuery = new ControlQuery(memoryIndex, queryStore); + controlQuery = requireScore ? controlQuery : new ConstantScoreQuery(controlQuery); + TopDocs controlTopDocs = shardSearcher.search(controlQuery, 10); + assertThat(topDocs.totalHits, equalTo(controlTopDocs.totalHits)); + assertThat(topDocs.scoreDocs.length, equalTo(controlTopDocs.scoreDocs.length)); + for (int j = 0; j < topDocs.scoreDocs.length; j++) { + assertThat(topDocs.scoreDocs[j].doc, equalTo(controlTopDocs.scoreDocs[j].doc)); + assertThat(topDocs.scoreDocs[j].score, equalTo(controlTopDocs.scoreDocs[j].score)); + if (requireScore) { + Explanation explain1 = shardSearcher.explain(query, topDocs.scoreDocs[j].doc); + Explanation explain2 = shardSearcher.explain(controlQuery, controlTopDocs.scoreDocs[j].doc); + assertThat(explain1.isMatch(), equalTo(explain2.isMatch())); + assertThat(explain1.getValue(), equalTo(explain2.getValue())); + } } } - private final static class CustomQuery extends Query { + private static final class CustomQuery extends Query { private final Term term; @@ -391,4 +462,89 @@ public class PercolateQueryTests extends ESTestCase { } } + private static final class ControlQuery extends Query { + + private final MemoryIndex memoryIndex; + private final PercolateQuery.QueryStore queryStore; + + private ControlQuery(MemoryIndex memoryIndex, PercolateQuery.QueryStore queryStore) { + this.memoryIndex = memoryIndex; + this.queryStore = queryStore; + } + + @Override + public Weight createWeight(IndexSearcher searcher, boolean needsScores) { + return new ConstantScoreWeight(this) { + + float _score; + + @Override + public Explanation explain(LeafReaderContext context, int doc) throws IOException { + Scorer scorer = scorer(context); + if (scorer != null) { + int result = scorer.iterator().advance(doc); + if (result == doc) { + return Explanation.match(scorer.score(), "ControlQuery"); + } + } + return Explanation.noMatch("ControlQuery"); + } + + @Override + public String toString() { + return "weight(" + ControlQuery.this + ")"; + } + + @Override + public Scorer scorer(LeafReaderContext context) throws IOException { + DocIdSetIterator allDocs = DocIdSetIterator.all(context.reader().maxDoc()); + PercolateQuery.QueryStore.Leaf leaf = queryStore.getQueries(context); + FilteredDocIdSetIterator memoryIndexIterator = new FilteredDocIdSetIterator(allDocs) { + + @Override + protected boolean match(int doc) { + try { + Query query = leaf.getQuery(doc); + float score = memoryIndex.search(query); + if (score != 0f) { + if (needsScores) { + _score = score; + } + 
return true; + } else { + return false; + } + } catch (IOException e) { + throw new RuntimeException(e); + } + } + }; + return new FilterScorer(new ConstantScoreScorer(this, score(), memoryIndexIterator)) { + + @Override + public float score() throws IOException { + return _score; + } + }; + } + }; + } + + @Override + public String toString(String field) { + return "control{" + field + "}"; + } + + @Override + public boolean equals(Object obj) { + return sameClassAs(obj); + } + + @Override + public int hashCode() { + return classHash(); + } + + } + } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java index 75d4f408774..f4a436e7c40 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java @@ -50,8 +50,8 @@ import static org.hamcrest.Matchers.notNullValue; public class PercolatorAggregationsIT extends ESIntegTestCase { - private final static String INDEX_NAME = "queries"; - private final static String TYPE_NAME = "query"; + private static final String INDEX_NAME = "queries"; + private static final String TYPE_NAME = "query"; @Override protected Collection> nodePlugins() { diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityTests.java index 701fd1b660f..7a51d8a7ab2 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityTests.java @@ -48,7 +48,7 @@ import static org.hamcrest.Matchers.notNullValue; // Can't run as IT as the test cluster is immutable and this test adds nodes during the test public class PercolatorBackwardsCompatibilityTests extends ESIntegTestCase { - private final static String INDEX_NAME = "percolator_index"; + private static final String INDEX_NAME = "percolator_index"; @Override protected Collection> nodePlugins() { diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index 5f8519db45a..3a3ddb01d2c 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -49,6 +49,8 @@ import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.index.query.QueryBuilders.termsLookupQuery; import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery; +import static org.elasticsearch.percolator.ExtractQueryTermsService.EXTRACTION_COMPLETE; +import static org.elasticsearch.percolator.ExtractQueryTermsService.EXTRACTION_FAILED; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -71,7 +73,9 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { indexService = createIndex("test", Settings.EMPTY); mapperService = indexService.mapperService(); - String mapper = 
XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + String mapper = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("_field_names").field("enabled", false).endObject() // makes testing easier + .startObject("properties") .startObject("field").field("type", "text").endObject() .startObject("number_field").field("type", "long").endObject() .startObject("date_field").field("type", "date").endObject() @@ -96,20 +100,21 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { .field(fieldName, queryBuilder) .endObject().bytes()); - assertThat(doc.rootDoc().getFields(fieldType.getUnknownQueryFieldName()).length, equalTo(0)); assertThat(doc.rootDoc().getFields(fieldType.getExtractedTermsField()).length, equalTo(1)); assertThat(doc.rootDoc().getFields(fieldType.getExtractedTermsField())[0].binaryValue().utf8ToString(), equalTo("field\0value")); assertThat(doc.rootDoc().getFields(fieldType.getQueryBuilderFieldName()).length, equalTo(1)); + assertThat(doc.rootDoc().getFields(fieldType.getExtractionResultFieldName()).length, equalTo(1)); + assertThat(doc.rootDoc().getFields(fieldType.getExtractionResultFieldName())[0].stringValue(), equalTo(EXTRACTION_COMPLETE)); BytesRef qbSource = doc.rootDoc().getFields(fieldType.getQueryBuilderFieldName())[0].binaryValue(); assertQueryBuilder(qbSource, queryBuilder); // add a query from which we don't extract terms - queryBuilder = matchAllQuery(); + queryBuilder = rangeQuery("field").from("a").to("z"); doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", XContentFactory.jsonBuilder().startObject() .field(fieldName, queryBuilder) .endObject().bytes()); - assertThat(doc.rootDoc().getFields(fieldType.getUnknownQueryFieldName()).length, equalTo(1)); - assertThat(doc.rootDoc().getFields(fieldType.getUnknownQueryFieldName())[0].binaryValue(), equalTo(new BytesRef())); + assertThat(doc.rootDoc().getFields(fieldType.getExtractionResultFieldName()).length, equalTo(1)); + assertThat(doc.rootDoc().getFields(fieldType.getExtractionResultFieldName())[0].stringValue(), equalTo(EXTRACTION_FAILED)); assertThat(doc.rootDoc().getFields(fieldType.getExtractedTermsField()).length, equalTo(0)); assertThat(doc.rootDoc().getFields(fieldType.getQueryBuilderFieldName()).length, equalTo(1)); qbSource = doc.rootDoc().getFields(fieldType.getQueryBuilderFieldName())[0].binaryValue(); @@ -195,6 +200,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { public void testMultiplePercolatorFields() throws Exception { String typeName = "another_type"; String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(typeName) + .startObject("_field_names").field("enabled", false).endObject() // makes testing easier .startObject("properties") .startObject("query_field1").field("type", "percolator").endObject() .startObject("query_field2").field("type", "percolator").endObject() @@ -221,6 +227,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { public void testNestedPercolatorField() throws Exception { String typeName = "another_type"; String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(typeName) + .startObject("_field_names").field("enabled", false).endObject() // makes testing easier .startObject("properties") .startObject("object_field") .field("type", "object") diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorIT.java 
b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorIT.java index ff9bf6634cd..e4e379c8f60 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorIT.java @@ -91,8 +91,8 @@ import static org.hamcrest.Matchers.nullValue; public class PercolatorIT extends ESIntegTestCase { - private final static String INDEX_NAME = "queries"; - private final static String TYPE_NAME = "query"; + private static final String INDEX_NAME = "queries"; + private static final String TYPE_NAME = "query"; @Override protected Collection> nodePlugins() { diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java index fc1a4206e82..5125a7ea5cc 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.percolator; import org.apache.lucene.search.join.ScoreMode; +import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -37,7 +38,6 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import java.util.Collection; import java.util.Collections; -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.commonTermsQuery; @@ -49,6 +49,7 @@ import static org.elasticsearch.index.query.QueryBuilders.spanNotQuery; import static org.elasticsearch.index.query.QueryBuilders.spanTermQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -158,6 +159,29 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { assertThat(response.getHits().getAt(2).getId(), equalTo("3")); } + public void testPercolatorQueryExistingDocumentSourceDisabled() throws Exception { + createIndex("test", client().admin().indices().prepareCreate("test") + .addMapping("type", "_source", "enabled=false", "field1", "type=keyword") + .addMapping("queries", "query", "type=percolator") + ); + + client().prepareIndex("test", "queries", "1") + .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()) + .get(); + + client().prepareIndex("test", "type", "1").setSource("{}").get(); + client().admin().indices().prepareRefresh().get(); + + logger.info("percolating empty doc with source disabled"); + Throwable e = expectThrows(SearchPhaseExecutionException.class, () -> { + client().prepareSearch() + .setQuery(new PercolateQueryBuilder("query", "type", "test", "type", "1", null, null, null)) + .get(); + }).getRootCause(); + assertThat(e, instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), containsString("source disabled")); + } + public void testPercolatorSpecificQueries() throws Exception { createIndex("test", 
client().admin().indices().prepareCreate("test") .addMapping("type", "field1", "type=text", "field2", "type=text") @@ -397,12 +421,16 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { .addMapping("employee", mapping) .addMapping("queries", "query", "type=percolator") ); - client().prepareIndex("test", "queries", "q").setSource(jsonBuilder().startObject() + client().prepareIndex("test", "queries", "q1").setSource(jsonBuilder().startObject() .field("query", QueryBuilders.nestedQuery("employee", QueryBuilders.matchQuery("employee.name", "virginia potts").operator(Operator.AND), ScoreMode.Avg) ).endObject()) - .setRefreshPolicy(IMMEDIATE) .get(); + // this query should never match as it doesn't use a nested query: + client().prepareIndex("test", "queries", "q2").setSource(jsonBuilder().startObject() + .field("query", QueryBuilders.matchQuery("employee.name", "virginia")).endObject()) + .get(); + client().admin().indices().prepareRefresh().get(); SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", "employee", @@ -413,9 +441,10 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { .startObject().field("name", "tony stark").endObject() .endArray() .endObject().bytes())) + .addSort("_doc", SortOrder.ASC) .get(); assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getId(), equalTo("q")); + assertThat(response.getHits().getAt(0).getId(), equalTo("q1")); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", "employee", @@ -426,12 +455,14 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { .startObject().field("name", "tony stark").endObject() .endArray() .endObject().bytes())) + .addSort("_doc", SortOrder.ASC) .get(); assertHitCount(response, 0); response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", "employee", XContentFactory.jsonBuilder().startObject().field("companyname", "notstark").endObject().bytes())) + .addSort("_doc", SortOrder.ASC) .get(); assertHitCount(response, 0); } diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index 5bd5aeb3dc0..c6def124326 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -21,3 +21,41 @@ esplugin { description 'The Reindex module adds APIs to reindex from one index to another or update documents in place.' classname 'org.elasticsearch.index.reindex.ReindexPlugin' } + +integTest { + cluster { + // Whitelist reindexing from the local node so we can test it. + setting 'reindex.remote.whitelist', 'myself' + } +} + +run { + // Whitelist reindexing from the local node so we can test it. + setting 'reindex.remote.whitelist', 'myself' +} + + +dependencies { + compile "org.elasticsearch.client:rest:${version}" + // dependencies of the rest client + compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" + compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" + compile "commons-codec:commons-codec:${versions.commonscodec}" + compile "commons-logging:commons-logging:${versions.commonslogging}" +} + +dependencyLicenses { + // Don't check the client's license. We know it. 
+ dependencies = project.configurations.runtime.fileCollection { + it.group.startsWith('org.elasticsearch') == false + } - project.configurations.provided +} + +thirdPartyAudit.excludes = [ + // Commons logging + 'javax.servlet.ServletContextEvent', + 'javax.servlet.ServletContextListener', + 'org.apache.avalon.framework.logger.Logger', + 'org.apache.log.Hierarchy', + 'org.apache.log.Logger', +] diff --git a/plugins/discovery-azure/licenses/commons-codec-1.10.jar.sha1 b/modules/reindex/licenses/commons-codec-1.10.jar.sha1 similarity index 100% rename from plugins/discovery-azure/licenses/commons-codec-1.10.jar.sha1 rename to modules/reindex/licenses/commons-codec-1.10.jar.sha1 diff --git a/plugins/discovery-azure/licenses/commons-codec-LICENSE.txt b/modules/reindex/licenses/commons-codec-LICENSE.txt similarity index 100% rename from plugins/discovery-azure/licenses/commons-codec-LICENSE.txt rename to modules/reindex/licenses/commons-codec-LICENSE.txt diff --git a/plugins/discovery-azure/licenses/commons-codec-NOTICE.txt b/modules/reindex/licenses/commons-codec-NOTICE.txt similarity index 100% rename from plugins/discovery-azure/licenses/commons-codec-NOTICE.txt rename to modules/reindex/licenses/commons-codec-NOTICE.txt diff --git a/plugins/discovery-azure/licenses/commons-logging-1.1.3.jar.sha1 b/modules/reindex/licenses/commons-logging-1.1.3.jar.sha1 similarity index 100% rename from plugins/discovery-azure/licenses/commons-logging-1.1.3.jar.sha1 rename to modules/reindex/licenses/commons-logging-1.1.3.jar.sha1 diff --git a/plugins/repository-azure/LICENSE.txt b/modules/reindex/licenses/commons-logging-LICENSE.txt similarity index 100% rename from plugins/repository-azure/LICENSE.txt rename to modules/reindex/licenses/commons-logging-LICENSE.txt index d6456956733..57bc88a15a0 100644 --- a/plugins/repository-azure/LICENSE.txt +++ b/modules/reindex/licenses/commons-logging-LICENSE.txt @@ -1,4 +1,3 @@ - Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ @@ -200,3 +199,4 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + diff --git a/modules/reindex/licenses/commons-logging-NOTICE.txt b/modules/reindex/licenses/commons-logging-NOTICE.txt new file mode 100644 index 00000000000..72eb32a9024 --- /dev/null +++ b/modules/reindex/licenses/commons-logging-NOTICE.txt @@ -0,0 +1,5 @@ +Apache Commons CLI +Copyright 2001-2009 The Apache Software Foundation + +This product includes software developed by +The Apache Software Foundation (http://www.apache.org/). 
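The build.gradle changes above pull the low-level REST client and its Apache HTTP dependencies into the reindex module, which is what lets reindex-from-remote talk to another cluster over plain HTTP once that cluster matches the reindex.remote.whitelist setting; it is also why the httpclient, httpcore, commons-codec, and commons-logging license files move into this module below. As a minimal sketch of what the new client dependency provides (not part of this patch): the host, port, and class name here are placeholder assumptions, and the exact builder and performRequest signatures should be checked against the client version actually on the classpath.

import org.apache.http.HttpHost;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class RemoteClusterPing {
    public static void main(String[] args) throws Exception {
        // Hypothetical remote node; remote reindex would target a host that
        // matches an entry in the reindex.remote.whitelist setting.
        try (RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // A bare GET / only proves connectivity; the reindex implementation
            // drives its search and scroll requests through the same client.
            Response response = restClient.performRequest("GET", "/");
            System.out.println(response.getStatusLine());
        }
    }
}

Keeping the client as a regular compile dependency (rather than shading it) is what makes the dependencyLicenses filtering and the commons-logging thirdPartyAudit exclusions above necessary.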
diff --git a/plugins/discovery-azure/licenses/httpclient-4.5.2.jar.sha1 b/modules/reindex/licenses/httpclient-4.5.2.jar.sha1 similarity index 100% rename from plugins/discovery-azure/licenses/httpclient-4.5.2.jar.sha1 rename to modules/reindex/licenses/httpclient-4.5.2.jar.sha1 diff --git a/plugins/discovery-azure/licenses/httpclient-LICENSE.txt b/modules/reindex/licenses/httpclient-LICENSE.txt similarity index 100% rename from plugins/discovery-azure/licenses/httpclient-LICENSE.txt rename to modules/reindex/licenses/httpclient-LICENSE.txt diff --git a/plugins/discovery-azure/licenses/httpclient-NOTICE.txt b/modules/reindex/licenses/httpclient-NOTICE.txt similarity index 100% rename from plugins/discovery-azure/licenses/httpclient-NOTICE.txt rename to modules/reindex/licenses/httpclient-NOTICE.txt diff --git a/plugins/discovery-azure/licenses/httpcore-4.4.4.jar.sha1 b/modules/reindex/licenses/httpcore-4.4.4.jar.sha1 similarity index 100% rename from plugins/discovery-azure/licenses/httpcore-4.4.4.jar.sha1 rename to modules/reindex/licenses/httpcore-4.4.4.jar.sha1 diff --git a/plugins/discovery-azure/licenses/httpcore-LICENSE.txt b/modules/reindex/licenses/httpcore-LICENSE.txt similarity index 100% rename from plugins/discovery-azure/licenses/httpcore-LICENSE.txt rename to modules/reindex/licenses/httpcore-LICENSE.txt diff --git a/plugins/discovery-azure/licenses/httpcore-NOTICE.txt b/modules/reindex/licenses/httpcore-NOTICE.txt similarity index 100% rename from plugins/discovery-azure/licenses/httpcore-NOTICE.txt rename to modules/reindex/licenses/httpcore-NOTICE.txt diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java index ceb00b2f81f..584dd022932 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.reindex; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; @@ -30,34 +29,24 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.bulk.Retry; import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.search.ClearScrollRequest; -import org.elasticsearch.action.search.ClearScrollResponse; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchScrollRequest; -import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.client.ParentTaskAssigningClient; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; -import org.elasticsearch.search.SearchHit; +import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure; +import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.threadpool.ThreadPool; import 
java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; -import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Consumer; import static java.lang.Math.max; import static java.lang.Math.min; @@ -74,46 +63,57 @@ import static org.elasticsearch.search.sort.SortBuilders.fieldSort; * their tests can use them. Most methods run in the listener thread pool because the are meant to be fast and don't expect to block. */ public abstract class AbstractAsyncBulkByScrollAction> { + protected final ESLogger logger; + protected final BulkByScrollTask task; + protected final ThreadPool threadPool; /** * The request for this action. Named mainRequest because we create lots of request variables all representing child * requests of this mainRequest. */ protected final Request mainRequest; - protected final BulkByScrollTask task; private final AtomicLong startTime = new AtomicLong(-1); - private final AtomicReference scroll = new AtomicReference<>(); private final Set destinationIndices = Collections.newSetFromMap(new ConcurrentHashMap<>()); - private final ESLogger logger; private final ParentTaskAssigningClient client; - private final ThreadPool threadPool; - private final SearchRequest firstSearchRequest; private final ActionListener listener; - private final BackoffPolicy backoffPolicy; private final Retry bulkRetry; + private final ScrollableHitSource scrollSource; public AbstractAsyncBulkByScrollAction(BulkByScrollTask task, ESLogger logger, ParentTaskAssigningClient client, - ThreadPool threadPool, Request mainRequest, SearchRequest firstSearchRequest, - ActionListener listener) { + ThreadPool threadPool, Request mainRequest, ActionListener listener) { this.task = task; this.logger = logger; this.client = client; this.threadPool = threadPool; this.mainRequest = mainRequest; - this.firstSearchRequest = firstSearchRequest; this.listener = listener; - backoffPolicy = buildBackoffPolicy(); - bulkRetry = Retry.on(EsRejectedExecutionException.class).policy(wrapBackoffPolicy(backoffPolicy)); + BackoffPolicy backoffPolicy = buildBackoffPolicy(); + bulkRetry = Retry.on(EsRejectedExecutionException.class).policy(BackoffPolicy.wrap(backoffPolicy, task::countBulkRetry)); + scrollSource = buildScrollableResultSource(backoffPolicy); + /* + * Default to sorting by doc. We can't do this in the request itself because it is normal to *add* to the sorts rather than replace + * them and if we add _doc as the first sort by default then sorts will never work.... So we add it here, only if there isn't + * another sort. + */ + List> sorts = mainRequest.getSearchRequest().source().sorts(); + if (sorts == null || sorts.isEmpty()) { + mainRequest.getSearchRequest().source().sort(fieldSort("_doc")); + } } - protected abstract BulkRequest buildBulk(Iterable docs); + protected abstract BulkRequest buildBulk(Iterable docs); + + protected ScrollableHitSource buildScrollableResultSource(BackoffPolicy backoffPolicy) { + return new ClientScrollableHitSource(logger, backoffPolicy, threadPool, task::countSearchRetry, this::finishHim, client, + mainRequest.getSearchRequest()); + } /** * Build the response for reindex actions. 
*/ protected BulkIndexByScrollResponse buildResponse(TimeValue took, List indexingFailures, - List searchFailures, boolean timedOut) { + List searchFailures, boolean timedOut) { return new BulkIndexByScrollResponse(took, task.getStatus(), indexingFailures, searchFailures, timedOut); } @@ -126,50 +126,33 @@ public abstract class AbstractAsyncBulkByScrollAction onScrollResponse(timeValueNanos(System.nanoTime()), 0, response)); + } catch (Exception e) { + finishHim(e); } - searchWithRetry(listener -> client.search(firstSearchRequest, listener), (SearchResponse response) -> { - logger.debug("[{}] documents match query", response.getHits().getTotalHits()); - onScrollResponse(timeValueNanos(System.nanoTime()), 0, response); - }); } /** * Process a scroll response. * @param lastBatchStartTime the time when the last batch started. Used to calculate the throttling delay. * @param lastBatchSize the size of the last batch. Used to calculate the throttling delay. - * @param searchResponse the scroll response to process + * @param response the scroll response to process */ - void onScrollResponse(TimeValue lastBatchStartTime, int lastBatchSize, SearchResponse searchResponse) { + void onScrollResponse(TimeValue lastBatchStartTime, int lastBatchSize, ScrollableHitSource.Response response) { if (task.isCancelled()) { finishHim(null); return; } - setScroll(searchResponse.getScrollId()); if ( // If any of the shards failed that should abort the request. - (searchResponse.getShardFailures() != null && searchResponse.getShardFailures().length > 0) + (response.getFailures().size() > 0) // Timeouts aren't shard failures but we still need to pass them back to the user. - || searchResponse.isTimedOut() + || response.isTimedOut() ) { - startNormalTermination(emptyList(), unmodifiableList(Arrays.asList(searchResponse.getShardFailures())), - searchResponse.isTimedOut()); + refreshAndFinish(emptyList(), response.getFailures(), response.isTimedOut()); return; } - long total = searchResponse.getHits().totalHits(); + long total = response.getTotalHits(); if (mainRequest.getSize() > 0) { total = min(total, mainRequest.getSize()); } @@ -181,12 +164,12 @@ public abstract class AbstractAsyncBulkByScrollAction docsIterable = Arrays.asList(docs); + List hits = response.getHits(); if (mainRequest.getSize() != SIZE_ALL_MATCHES) { - // Truncate the docs if we have more than the request size + // Truncate the hits if we have more than the request size long remaining = max(0, mainRequest.getSize() - task.getSuccessfullyProcessed()); - if (remaining < docs.length) { - docsIterable = docsIterable.subList(0, (int) remaining); + if (remaining < hits.size()) { + hits = hits.subList(0, (int) remaining); } } - BulkRequest request = buildBulk(docsIterable); + BulkRequest request = buildBulk(hits); if (request.requests().isEmpty()) { /* * If we noop-ed the entire batch then just skip to the next batch or the BulkRequest would fail validation. @@ -250,7 +231,7 @@ public abstract class AbstractAsyncBulkByScrollAction= mainRequest.getSize()) { // We've processed all the requested docs. 
- startNormalTermination(emptyList(), emptyList(), false); + refreshAndFinish(emptyList(), emptyList(), false); return; } startNextScroll(thisBatchStartTime, response.getItems().length); - } catch (Throwable t) { + } catch (Exception t) { finishHim(t); } } @@ -324,11 +305,8 @@ public abstract class AbstractAsyncBulkByScrollAction client.searchScroll(request, listener), (SearchResponse response) -> { + TimeValue extraKeepAlive = task.throttleWaitTime(lastBatchStartTime, lastBatchSize); + scrollSource.startNextScroll(extraKeepAlive, response -> { onScrollResponse(lastBatchStartTime, lastBatchSize, response); }); } @@ -344,9 +322,10 @@ public abstract class AbstractAsyncBulkByScrollAction indexingFailures, List searchFailures, boolean timedOut) { + void refreshAndFinish(List indexingFailures, List searchFailures, boolean timedOut) { if (task.isCancelled() || false == mainRequest.isRefresh() || destinationIndices.isEmpty()) { finishHim(null, indexingFailures, searchFailures, timedOut); return; @@ -360,7 +339,7 @@ public abstract class AbstractAsyncBulkByScrollAction indexingFailures, List searchFailures, boolean timedOut) { - String scrollId = scroll.get(); - if (Strings.hasLength(scrollId)) { - /* - * Fire off the clear scroll but don't wait for it it return before - * we send the use their response. - */ - ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); - clearScrollRequest.addScrollId(scrollId); - /* - * Unwrap the client so we don't set our task as the parent. If we *did* set our ID then the clear scroll would be cancelled as - * if this task is cancelled. But we want to clear the scroll regardless of whether or not the main request was cancelled. - */ - client.unwrap().clearScroll(clearScrollRequest, new ActionListener() { - @Override - public void onResponse(ClearScrollResponse response) { - logger.debug("Freed [{}] contexts", response.getNumFreed()); - } - - @Override - public void onFailure(Throwable e) { - logger.warn("Failed to clear scroll [" + scrollId + ']', e); - } - }); - } + void finishHim(Exception failure, List indexingFailures, List searchFailures, boolean timedOut) { + scrollSource.close(); if (failure == null) { listener.onResponse( buildResponse(timeValueNanos(System.nanoTime() - startTime.get()), indexingFailures, searchFailures, timedOut)); @@ -435,75 +390,6 @@ public abstract class AbstractAsyncBulkByScrollAction iterator() { - return new Iterator() { - private final Iterator delegate = backoffPolicy.iterator(); - @Override - public boolean hasNext() { - return delegate.hasNext(); - } - - @Override - public TimeValue next() { - if (false == delegate.hasNext()) { - return null; - } - task.countBulkRetry(); - return delegate.next(); - } - }; - } - }; - } - - /** - * Run a search action and call onResponse when a the response comes in, retrying if the action fails with an exception caused by - * rejected execution. - * - * @param action consumes a listener and starts the action. The listener it consumes is rigged to retry on failure. 
- * @param onResponse consumes the response from the action - */ - private void searchWithRetry(Consumer> action, Consumer onResponse) { - class RetryHelper extends AbstractRunnable implements ActionListener { - private final Iterator retries = backoffPolicy.iterator(); - - @Override - public void onResponse(T response) { - onResponse.accept(response); - } - - @Override - protected void doRun() throws Exception { - action.accept(this); - } - - @Override - public void onFailure(Throwable e) { - if (ExceptionsHelper.unwrap(e, EsRejectedExecutionException.class) != null) { - if (retries.hasNext()) { - TimeValue delay = retries.next(); - logger.trace("retrying rejected search after [{}]", e, delay); - threadPool.schedule(delay, ThreadPool.Names.SAME, this); - task.countSearchRetry(); - } else { - logger.warn("giving up on search because we retried {} times without success", e, retries); - finishHim(e); - } - } else { - logger.warn("giving up on search because it failed with a non-retryable exception", e); - finishHim(e); - } - } - } - new RetryHelper().run(); + scrollSource.setScroll(scroll); } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java index 6cb62207506..4b87df46312 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.ParentTaskAssigningClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.logging.ESLogger; @@ -44,8 +43,6 @@ import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.SearchHitField; import org.elasticsearch.threadpool.ThreadPool; import java.util.Arrays; @@ -72,13 +69,13 @@ public abstract class AbstractAsyncBulkIndexByScrollAction, SearchHit, RequestWrapper> scriptApplier; + private final BiFunction, ScrollableHitSource.Hit, RequestWrapper> scriptApplier; public AbstractAsyncBulkIndexByScrollAction(BulkByScrollTask task, ESLogger logger, ParentTaskAssigningClient client, - ThreadPool threadPool, Request mainRequest, SearchRequest firstSearchRequest, + ThreadPool threadPool, Request mainRequest, ActionListener listener, ScriptService scriptService, ClusterState clusterState) { - super(task, logger, client, threadPool, mainRequest, firstSearchRequest, listener); + super(task, logger, client, threadPool, mainRequest, listener); this.scriptService = scriptService; this.clusterState = clusterState; this.scriptApplier = Objects.requireNonNull(buildScriptApplier(), "script applier must not be null"); @@ -87,15 +84,15 @@ public abstract class AbstractAsyncBulkIndexByScrollAction, SearchHit, RequestWrapper> buildScriptApplier() { + protected BiFunction, ScrollableHitSource.Hit, RequestWrapper> buildScriptApplier() { // The default script applier executes a no-op return (request, searchHit) -> request; } 
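The script applier above is a BiFunction from a request wrapper and a hit to a (possibly rewritten) request wrapper; the default simply passes the request through, and returning null drops the document because buildBulk skips null requests. A compilable sketch of that contract, using hypothetical stripped-down stand-ins for the real RequestWrapper and ScrollableHitSource.Hit types:

import java.util.function.BiFunction;

public class ApplierShapeDemo {
    /** Hypothetical stand-in for ScrollableHitSource.Hit. */
    interface Hit {
        String getIndex();
    }

    /** Hypothetical stand-in for the module's RequestWrapper. */
    interface RequestWrapper<T> {
        void setIndex(String index);
    }

    // Mirrors the default applier above: pass every request through untouched.
    static <T> BiFunction<RequestWrapper<T>, Hit, RequestWrapper<T>> noop() {
        return (request, hit) -> request;
    }

    // A non-trivial applier: send every document to one fixed index. Returning
    // null instead would drop the document, since buildBulk skips null requests.
    static <T> BiFunction<RequestWrapper<T>, Hit, RequestWrapper<T>> retarget(String index) {
        return (request, hit) -> {
            request.setIndex(index);
            return request;
        };
    }

    public static void main(String[] args) {
        RequestWrapper<Void> wrapper = index -> System.out.println("routed to " + index);
        retarget("new-index").apply(wrapper, () -> "old-index");
    }
}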
@Override - protected BulkRequest buildBulk(Iterable docs) { + protected BulkRequest buildBulk(Iterable docs) { BulkRequest bulkRequest = new BulkRequest(); - for (SearchHit doc : docs) { + for (ScrollableHitSource.Hit doc : docs) { if (accept(doc)) { RequestWrapper request = scriptApplier.apply(copyMetadata(buildRequest(doc), doc), doc); if (request != null) { @@ -111,14 +108,14 @@ public abstract class AbstractAsyncBulkIndexByScrollAction buildRequest(SearchHit doc); + protected abstract RequestWrapper buildRequest(ScrollableHitSource.Hit doc); /** * Copies the metadata from a hit to the request. */ - protected RequestWrapper copyMetadata(RequestWrapper request, SearchHit doc) { - copyParent(request, fieldValue(doc, ParentFieldMapper.NAME)); - copyRouting(request, fieldValue(doc, RoutingFieldMapper.NAME)); + protected RequestWrapper copyMetadata(RequestWrapper request, ScrollableHitSource.Hit doc) { + request.setParent(doc.getParent()); + copyRouting(request, doc.getRouting()); // Comes back as a Long but needs to be a string - Long timestamp = fieldValue(doc, TimestampFieldMapper.NAME); + Long timestamp = doc.getTimestamp(); if (timestamp != null) { request.setTimestamp(timestamp.toString()); } - Long ttl = fieldValue(doc, TTLFieldMapper.NAME); + Long ttl = doc.getTTL(); if (ttl != null) { request.setTtl(ttl); } return request; } - /** - * Copy the parent from a search hit to the request. - */ - protected void copyParent(RequestWrapper request, String parent) { - request.setParent(parent); - } - /** * Copy the routing from a search hit to the request. */ @@ -163,11 +153,6 @@ public abstract class AbstractAsyncBulkIndexByScrollAction T fieldValue(SearchHit doc, String fieldName) { - SearchHitField field = doc.field(fieldName); - return field == null ? null : field.value(); - } - /** * Wrapper for the {@link ActionRequest} that are used in this action class. 
*/ @@ -435,52 +420,50 @@ public abstract class AbstractAsyncBulkIndexByScrollAction, SearchHit, RequestWrapper> { + public abstract class ScriptApplier implements BiFunction, ScrollableHitSource.Hit, RequestWrapper> { private final BulkByScrollTask task; private final ScriptService scriptService; - private final ClusterState state; private final Script script; private final Map params; private ExecutableScript executable; private Map context; - public ScriptApplier(BulkByScrollTask task, ScriptService scriptService, Script script, ClusterState state, + public ScriptApplier(BulkByScrollTask task, ScriptService scriptService, Script script, Map params) { this.task = task; this.scriptService = scriptService; this.script = script; - this.state = state; this.params = params; } @Override @SuppressWarnings("unchecked") - public RequestWrapper apply(RequestWrapper request, SearchHit doc) { + public RequestWrapper apply(RequestWrapper request, ScrollableHitSource.Hit doc) { if (script == null) { return request; } if (executable == null) { - CompiledScript compiled = scriptService.compile(script, ScriptContext.Standard.UPDATE, emptyMap(), state); + CompiledScript compiled = scriptService.compile(script, ScriptContext.Standard.UPDATE, emptyMap()); executable = scriptService.executable(compiled, params); } if (context == null) { context = new HashMap<>(); } - context.put(IndexFieldMapper.NAME, doc.index()); - context.put(TypeFieldMapper.NAME, doc.type()); - context.put(IdFieldMapper.NAME, doc.id()); + context.put(IndexFieldMapper.NAME, doc.getIndex()); + context.put(TypeFieldMapper.NAME, doc.getType()); + context.put(IdFieldMapper.NAME, doc.getId()); Long oldVersion = doc.getVersion(); context.put(VersionFieldMapper.NAME, oldVersion); - String oldParent = fieldValue(doc, ParentFieldMapper.NAME); + String oldParent = doc.getParent(); context.put(ParentFieldMapper.NAME, oldParent); - String oldRouting = fieldValue(doc, RoutingFieldMapper.NAME); + String oldRouting = doc.getRouting(); context.put(RoutingFieldMapper.NAME, oldRouting); - Long oldTimestamp = fieldValue(doc, TimestampFieldMapper.NAME); + Long oldTimestamp = doc.getTimestamp(); context.put(TimestampFieldMapper.NAME, oldTimestamp); - Long oldTTL = fieldValue(doc, TTLFieldMapper.NAME); + Long oldTTL = doc.getTTL(); context.put(TTLFieldMapper.NAME, oldTTL); context.put(SourceFieldMapper.NAME, request.getSource()); @@ -503,15 +486,15 @@ public abstract class AbstractAsyncBulkIndexByScrollAction) resultCtx.remove(SourceFieldMapper.NAME)); Object newValue = context.remove(IndexFieldMapper.NAME); - if (false == doc.index().equals(newValue)) { + if (false == doc.getIndex().equals(newValue)) { scriptChangedIndex(request, newValue); } newValue = context.remove(TypeFieldMapper.NAME); - if (false == doc.type().equals(newValue)) { + if (false == doc.getType().equals(newValue)) { scriptChangedType(request, newValue); } newValue = context.remove(IdFieldMapper.NAME); - if (false == doc.id().equals(newValue)) { + if (false == doc.getId().equals(newValue)) { scriptChangedId(request, newValue); } newValue = context.remove(VersionFieldMapper.NAME); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java index 284e51e054f..048e4208fea 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java +++ 
b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java @@ -52,10 +52,10 @@ public abstract class AbstractBaseReindexRestHandler< private final ClusterService clusterService; private final TA action; - protected AbstractBaseReindexRestHandler(Settings settings, Client client, - IndicesQueriesRegistry indicesQueriesRegistry, AggregatorParsers aggParsers, Suggesters suggesters, - ClusterService clusterService, TA action) { - super(settings, client); + protected AbstractBaseReindexRestHandler(Settings settings, IndicesQueriesRegistry indicesQueriesRegistry, + AggregatorParsers aggParsers, Suggesters suggesters, + ClusterService clusterService, TA action) { + super(settings); this.indicesQueriesRegistry = indicesQueriesRegistry; this.aggParsers = aggParsers; this.suggesters = suggesters; @@ -63,7 +63,7 @@ public abstract class AbstractBaseReindexRestHandler< this.action = action; } - protected void handleRequest(RestRequest request, RestChannel channel, + public void handleRequest(RestRequest request, RestChannel channel, boolean includeCreated, boolean includeUpdated) throws IOException { // Build the internal request diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java index 926da3befdd..9e4d8fc6d4e 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java @@ -50,10 +50,10 @@ public abstract class AbstractBulkByQueryRestHandler< Request extends AbstractBulkByScrollRequest, TA extends TransportAction> extends AbstractBaseReindexRestHandler { - protected AbstractBulkByQueryRestHandler(Settings settings, Client client, IndicesQueriesRegistry indicesQueriesRegistry, + protected AbstractBulkByQueryRestHandler(Settings settings, IndicesQueriesRegistry indicesQueriesRegistry, AggregatorParsers aggParsers, Suggesters suggesters, ClusterService clusterService, TA action) { - super(settings, client, indicesQueriesRegistry, aggParsers, suggesters, clusterService, action); + super(settings, indicesQueriesRegistry, aggParsers, suggesters, clusterService, action); } protected void parseInternalRequest(Request internal, RestRequest restRequest, diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkIndexByScrollRequest.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkIndexByScrollRequest.java index 13b8b9780b3..10fb0bc676e 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkIndexByScrollRequest.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkIndexByScrollRequest.java @@ -74,7 +74,7 @@ public abstract class AbstractBulkIndexByScrollRequest indexingFailures; - private List searchFailures; + private List bulkFailures; + private List searchFailures; private boolean timedOut; public BulkIndexByScrollResponse() { } - public BulkIndexByScrollResponse(TimeValue took, BulkByScrollTask.Status status, List indexingFailures, - List searchFailures, boolean timedOut) { + public BulkIndexByScrollResponse(TimeValue took, BulkByScrollTask.Status status, List bulkFailures, + List searchFailures, boolean timedOut) { this.took = took; this.status = requireNonNull(status, "Null status not supported"); - this.indexingFailures = 
indexingFailures; + this.bulkFailures = bulkFailures; this.searchFailures = searchFailures; this.timedOut = timedOut; } @@ -113,17 +110,16 @@ public class BulkIndexByScrollResponse extends ActionResponse implements ToXCont } /** - * All of the indexing failures. Version conflicts are only included if the request sets abortOnVersionConflict to true (the - * default). + * All of the bulk failures. Version conflicts are only included if the request sets abortOnVersionConflict to true (the default). */ - public List getIndexingFailures() { - return indexingFailures; + public List getBulkFailures() { + return bulkFailures; } /** * All search failures. */ - public List getSearchFailures() { + public List getSearchFailures() { return searchFailures; } @@ -139,14 +135,8 @@ public class BulkIndexByScrollResponse extends ActionResponse implements ToXCont super.writeTo(out); took.writeTo(out); status.writeTo(out); - out.writeVInt(indexingFailures.size()); - for (Failure failure: indexingFailures) { - failure.writeTo(out); - } - out.writeVInt(searchFailures.size()); - for (ShardSearchFailure failure: searchFailures) { - failure.writeTo(out); - } + out.writeList(bulkFailures); + out.writeList(searchFailures); out.writeBoolean(timedOut); } @@ -155,19 +145,9 @@ public class BulkIndexByScrollResponse extends ActionResponse implements ToXCont super.readFrom(in); took = new TimeValue(in); status = new BulkByScrollTask.Status(in); - int indexingFailuresCount = in.readVInt(); - List indexingFailures = new ArrayList<>(indexingFailuresCount); - for (int i = 0; i < indexingFailuresCount; i++) { - indexingFailures.add(new Failure(in)); - } - this.indexingFailures = unmodifiableList(indexingFailures); - int searchFailuresCount = in.readVInt(); - List searchFailures = new ArrayList<>(searchFailuresCount); - for (int i = 0; i < searchFailuresCount; i++) { - searchFailures.add(readShardSearchFailure(in)); - } - this.searchFailures = unmodifiableList(searchFailures); - this.timedOut = in.readBoolean(); + bulkFailures = in.readList(Failure::new); + searchFailures = in.readList(SearchFailure::new); + timedOut = in.readBoolean(); } @Override @@ -176,15 +156,13 @@ public class BulkIndexByScrollResponse extends ActionResponse implements ToXCont builder.field("timed_out", timedOut); status.innerXContent(builder, params); builder.startArray("failures"); - for (Failure failure: indexingFailures) { + for (Failure failure: bulkFailures) { builder.startObject(); failure.toXContent(builder, params); builder.endObject(); } - for (ShardSearchFailure failure: searchFailures) { - builder.startObject(); + for (SearchFailure failure: searchFailures) { failure.toXContent(builder, params); - builder.endObject(); } builder.endArray(); return builder; @@ -197,7 +175,7 @@ public class BulkIndexByScrollResponse extends ActionResponse implements ToXCont builder.append("took=").append(took).append(','); builder.append("timed_out=").append(timedOut).append(','); status.innerToString(builder); - builder.append(",indexing_failures=").append(getIndexingFailures().subList(0, min(3, getIndexingFailures().size()))); + builder.append(",bulk_failures=").append(getBulkFailures().subList(0, min(3, getBulkFailures().size()))); builder.append(",search_failures=").append(getSearchFailures().subList(0, min(3, getSearchFailures().size()))); return builder.append(']').toString(); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseContentListener.java 
b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseContentListener.java index 72bf6957e12..6cfba3a302d 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseContentListener.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseContentListener.java @@ -21,9 +21,9 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.bulk.BulkItemResponse.Failure; -import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestResponse; @@ -61,13 +61,13 @@ public class BulkIndexByScrollResponseContentListener status.getStatus()) { status = failure.getStatus(); } } - for (ShardSearchFailure failure: response.getSearchFailures()) { - RestStatus failureStatus = ExceptionsHelper.status(failure.getCause()); + for (SearchFailure failure: response.getSearchFailures()) { + RestStatus failureStatus = ExceptionsHelper.status(failure.getReason()); if (failureStatus.getStatus() > status.getStatus()) { status = failureStatus; } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java new file mode 100644 index 00000000000..5e694e2cf26 --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java @@ -0,0 +1,251 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.bulk.BackoffPolicy; +import org.elasticsearch.action.search.ClearScrollRequest; +import org.elasticsearch.action.search.ClearScrollResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.client.Client; +import org.elasticsearch.client.ParentTaskAssigningClient; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.index.mapper.internal.ParentFieldMapper; +import org.elasticsearch.index.mapper.internal.RoutingFieldMapper; +import org.elasticsearch.index.mapper.internal.TTLFieldMapper; +import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHitField; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.function.Consumer; + +import static java.util.Collections.emptyList; +import static java.util.Collections.unmodifiableList; +import static org.elasticsearch.common.unit.TimeValue.timeValueNanos; +import static org.elasticsearch.common.util.CollectionUtils.isEmpty; + +/** + * A scrollable source of hits from a {@linkplain Client} instance. + */ +public class ClientScrollableHitSource extends ScrollableHitSource { + private final ParentTaskAssigningClient client; + private final SearchRequest firstSearchRequest; + + public ClientScrollableHitSource(ESLogger logger, BackoffPolicy backoffPolicy, ThreadPool threadPool, Runnable countSearchRetry, + Consumer<Exception> fail, ParentTaskAssigningClient client, SearchRequest firstSearchRequest) { + super(logger, backoffPolicy, threadPool, countSearchRetry, fail); + this.client = client; + this.firstSearchRequest = firstSearchRequest; + } + + @Override + public void doStart(Consumer<Response> onResponse) { + if (logger.isDebugEnabled()) { + logger.debug("executing initial scroll against {}{}", + isEmpty(firstSearchRequest.indices()) ? "all indices" : firstSearchRequest.indices(), + isEmpty(firstSearchRequest.types()) ? "" : firstSearchRequest.types()); + } + searchWithRetry(listener -> client.search(firstSearchRequest, listener), r -> consume(r, onResponse)); + } + + @Override + protected void doStartNextScroll(String scrollId, TimeValue extraKeepAlive, Consumer<Response> onResponse) { + SearchScrollRequest request = new SearchScrollRequest(); + // Add the wait time into the scroll timeout so it won't time out while we wait for throttling + request.scrollId(scrollId).scroll(timeValueNanos(firstSearchRequest.scroll().keepAlive().nanos() + extraKeepAlive.nanos())); + searchWithRetry(listener -> client.searchScroll(request, listener), r -> consume(r, onResponse)); + } + + @Override + public void clearScroll(String scrollId) { + /* + * Fire off the clear scroll but don't wait for it to return before + * we send the user their response.
+ */ + ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); + clearScrollRequest.addScrollId(scrollId); + /* + * Unwrap the client so we don't set our task as the parent. If we *did* set our ID then the clear scroll would be cancelled as + * if this task is cancelled. But we want to clear the scroll regardless of whether or not the main request was cancelled. + */ + client.unwrap().clearScroll(clearScrollRequest, new ActionListener<ClearScrollResponse>() { + @Override + public void onResponse(ClearScrollResponse response) { + logger.debug("Freed [{}] contexts", response.getNumFreed()); + } + + @Override + public void onFailure(Exception e) { + logger.warn("Failed to clear scroll [{}]", e, scrollId); + } + }); + } + + /** + * Run a search action and call onResponse when the response comes in, retrying if the action fails with an exception caused by + * rejected execution. + * + * @param action consumes a listener and starts the action. The listener it consumes is rigged to retry on failure. + * @param onResponse consumes the response from the action + */ + private void searchWithRetry(Consumer<ActionListener<SearchResponse>> action, Consumer<SearchResponse> onResponse) { + /* + * RetryHelper is both an AbstractRunnable and an ActionListener - meaning that it both starts the search and + * reacts to the results. The complexity is all in onFailure which either adapts the failure to the "fail" listener or + * retries the search. Since both AbstractRunnable and ActionListener define the onFailure method it is called for either failure + * to run the action (either while running or before starting) and for failure on the response from the action. + */ + class RetryHelper extends AbstractRunnable implements ActionListener<SearchResponse> { + private final Iterator<TimeValue> retries = backoffPolicy.iterator(); + private volatile int retryCount = 0; + + @Override + protected void doRun() throws Exception { + action.accept(this); + } + + @Override + public void onResponse(SearchResponse response) { + onResponse.accept(response); + } + + @Override + public void onFailure(Exception e) { + if (ExceptionsHelper.unwrap(e, EsRejectedExecutionException.class) != null) { + if (retries.hasNext()) { + retryCount += 1; + TimeValue delay = retries.next(); + logger.trace("retrying rejected search after [{}]", e, delay); + countSearchRetry.run(); + threadPool.schedule(delay, ThreadPool.Names.SAME, this); + } else { + logger.warn("giving up on search because we retried [{}] times without success", e, retryCount); + fail.accept(e); + } + } else { + logger.warn("giving up on search because it failed with a non-retryable exception", e); + fail.accept(e); + } + } + } + new RetryHelper().run(); + } + + private void consume(SearchResponse response, Consumer<Response> onResponse) { + onResponse.accept(wrap(response)); + } + + private Response wrap(SearchResponse response) { + List<SearchFailure> failures; + if (response.getShardFailures() == null) { + failures = emptyList(); + } else { + failures = new ArrayList<>(response.getShardFailures().length); + for (ShardSearchFailure failure: response.getShardFailures()) { + String nodeId = failure.shard() == null ?
null : failure.shard().nodeId(); + failures.add(new SearchFailure(failure.getCause(), failure.index(), failure.shardId(), nodeId)); + } + } + List hits; + if (response.getHits().getHits() == null || response.getHits().getHits().length == 0) { + hits = emptyList(); + } else { + hits = new ArrayList<>(response.getHits().getHits().length); + for (SearchHit hit: response.getHits().getHits()) { + hits.add(new ClientHit(hit)); + } + hits = unmodifiableList(hits); + } + return new Response(response.isTimedOut(), failures, response.getHits().getTotalHits(), + hits, response.getScrollId()); + } + + private static class ClientHit implements Hit { + private final SearchHit delegate; + private final BytesReference source; + + public ClientHit(SearchHit delegate) { + this.delegate = delegate; + source = delegate.hasSource() ? null : delegate.getSourceRef(); + } + + @Override + public String getIndex() { + return delegate.getIndex(); + } + + @Override + public String getType() { + return delegate.getType(); + } + + @Override + public String getId() { + return delegate.getId(); + } + + @Override + public BytesReference getSource() { + return source; + } + + @Override + public long getVersion() { + return delegate.getVersion(); + } + + @Override + public String getParent() { + return fieldValue(ParentFieldMapper.NAME); + } + + @Override + public String getRouting() { + return fieldValue(RoutingFieldMapper.NAME); + } + + @Override + public Long getTimestamp() { + return fieldValue(TimestampFieldMapper.NAME); + } + + @Override + public Long getTTL() { + return fieldValue(TTLFieldMapper.NAME); + } + + private T fieldValue(String fieldName) { + SearchHitField field = delegate.field(fieldName); + return field == null ? null : field.value(); + } + } +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java index d4c81177571..4f2cb2578ac 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java @@ -19,25 +19,42 @@ package org.elasticsearch.index.reindex; -import org.elasticsearch.action.ActionModule; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestHandler; -public class ReindexPlugin extends Plugin { +import java.util.Arrays; +import java.util.List; + +import static java.util.Collections.singletonList; + +public class ReindexPlugin extends Plugin implements ActionPlugin { public static final String NAME = "reindex"; - public void onModule(ActionModule actionModule) { - actionModule.registerAction(ReindexAction.INSTANCE, TransportReindexAction.class); - actionModule.registerAction(UpdateByQueryAction.INSTANCE, TransportUpdateByQueryAction.class); - actionModule.registerAction(DeleteByQueryAction.INSTANCE, TransportDeleteByQueryAction.class); - actionModule.registerAction(RethrottleAction.INSTANCE, TransportRethrottleAction.class); + @Override + public List, ? 
extends ActionResponse>> getActions() { + return Arrays.asList(new ActionHandler<>(ReindexAction.INSTANCE, TransportReindexAction.class), + new ActionHandler<>(UpdateByQueryAction.INSTANCE, TransportUpdateByQueryAction.class), + new ActionHandler<>(DeleteByQueryAction.INSTANCE, TransportDeleteByQueryAction.class), + new ActionHandler<>(RethrottleAction.INSTANCE, TransportRethrottleAction.class)); + } + + @Override + public List> getRestHandlers() { + return Arrays.asList(RestReindexAction.class, RestUpdateByQueryAction.class, RestDeleteByQueryAction.class, + RestRethrottleAction.class); } public void onModule(NetworkModule networkModule) { - networkModule.registerRestHandler(RestReindexAction.class); - networkModule.registerRestHandler(RestUpdateByQueryAction.class); - networkModule.registerRestHandler(RestDeleteByQueryAction.class); - networkModule.registerRestHandler(RestRethrottleAction.class); networkModule.registerTaskStatus(BulkByScrollTask.Status.NAME, BulkByScrollTask.Status::new); } + + @Override + public List> getSettings() { + return singletonList(TransportReindexAction.REMOTE_CLUSTER_WHITELIST); + } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java index 660815bbf52..8c11cd3430f 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java @@ -27,11 +27,13 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.index.reindex.remote.RemoteInfo; import java.io.IOException; import java.util.Arrays; import java.util.List; +import static java.util.Collections.singletonList; import static java.util.Collections.unmodifiableList; import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.index.VersionType.INTERNAL; @@ -48,6 +50,8 @@ public class ReindexRequest extends AbstractBulkIndexByScrollRequest subRequests() { assert getSearchRequest() != null; assert getDestination() != null; + if (remoteInfo != null) { + return singletonList(getDestination()); + } return unmodifiableList(Arrays.asList(getSearchRequest(), getDestination())); } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java index 4f814dbc49d..1eadf2c15bc 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.index.reindex.remote.RemoteInfo; public class ReindexRequestBuilder extends AbstractBulkIndexByScrollRequestBuilder { @@ -67,4 +68,12 @@ public class ReindexRequestBuilder extends destination.setIndex(index).setType(type); return this; } + + /** + * Setup reindexing from a remote cluster. 
+ */ + public ReindexRequestBuilder setRemoteInfo(RemoteInfo remoteInfo) { + request().setRemoteInfo(remoteInfo); + return this; + } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java index bb894584c8b..7a1f466c3c0 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.client.Client; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -42,16 +42,16 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestDeleteByQueryAction extends AbstractBulkByQueryRestHandler { @Inject - public RestDeleteByQueryAction(Settings settings, RestController controller, Client client, + public RestDeleteByQueryAction(Settings settings, RestController controller, IndicesQueriesRegistry indicesQueriesRegistry, AggregatorParsers aggParsers, Suggesters suggesters, ClusterService clusterService, TransportDeleteByQueryAction action) { - super(settings, client, indicesQueriesRegistry, aggParsers, suggesters, clusterService, action); + super(settings, indicesQueriesRegistry, aggParsers, suggesters, clusterService, action); controller.registerHandler(POST, "/{index}/_delete_by_query", this); controller.registerHandler(POST, "/{index}/{type}/_delete_by_query", this); } @Override - protected void handleRequest(RestRequest request, RestChannel channel, Client client) throws Exception { + public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { if (false == request.hasContent()) { throw new ElasticsearchException("_delete_by_query requires a request body"); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java index 22fcd390430..3bbfebf68a7 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java @@ -22,21 +22,26 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.client.Client; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParseFieldMatcherSupplier; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.ToXContent; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.reindex.remote.RemoteInfo; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; @@ -48,36 +53,39 @@ import org.elasticsearch.search.suggest.Suggesters; import java.io.IOException; import java.util.List; import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import static java.util.Objects.requireNonNull; import static org.elasticsearch.common.unit.TimeValue.parseTimeValue; +import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.rest.RestRequest.Method.POST; /** - * Expose IndexBySearchRequest over rest. + * Expose reindex over rest. */ public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexRequest, TransportReindexAction> { - - private static final ObjectParser<ReindexRequest, ReindexParseContext> PARSER = new ObjectParser<>("reindex"); + static final ObjectParser<ReindexRequest, ReindexParseContext> PARSER = new ObjectParser<>("reindex"); + private static final Pattern HOST_PATTERN = Pattern.compile("(?<scheme>[^:]+)://(?<host>[^:]+):(?<port>\\d+)"); static { - ObjectParser.Parser<SearchRequest, ReindexParseContext> sourceParser = (parser, search, context) -> { - /* - * Extract the parameters that we need from the source sent to the parser. We could do away with this hack when search source - * has an ObjectParser. - */ + ObjectParser.Parser<ReindexRequest, ReindexParseContext> sourceParser = (parser, request, context) -> { + // Funky hack to work around Search not having a proper ObjectParser and us wanting to extract query if using remote.
Map<String, Object> source = parser.map(); String[] indices = extractStringArray(source, "index"); if (indices != null) { - search.indices(indices); + request.getSearchRequest().indices(indices); } String[] types = extractStringArray(source, "type"); if (types != null) { - search.types(types); + request.getSearchRequest().types(types); } + request.setRemoteInfo(buildRemoteInfo(source)); XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType()); builder.map(source); try (XContentParser innerParser = parser.contentType().xContent().createParser(builder.bytes())) { - search.source().parseXContent(context.queryParseContext(innerParser), context.aggParsers, context.suggesters); + request.getSearchRequest().source().parseXContent(context.queryParseContext(innerParser), context.aggParsers, + context.suggesters); } }; @@ -94,7 +102,7 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler i.ttl(parseTimeValue(ttl, TimeValue.timeValueMillis(-1), "ttl").millis()), new ParseField("ttl")); - PARSER.declareField((p, v, c) -> sourceParser.parse(p, v.getSearchRequest(), c), new ParseField("source"), ValueType.OBJECT); + PARSER.declareField((p, v, c) -> sourceParser.parse(p, v, c), new ParseField("source"), ValueType.OBJECT); PARSER.declareField((p, v, c) -> destParser.parse(p, v.getDestination(), c), new ParseField("dest"), ValueType.OBJECT); PARSER.declareInt(ReindexRequest::setSize, new ParseField("size")); PARSER.declareField((p, v, c) -> v.setScript(Script.parse(p, c.getParseFieldMatcher())), new ParseField("script"), @@ -103,15 +111,15 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler source) throws IOException { + @SuppressWarnings("unchecked") + Map<String, Object> remote = (Map<String, Object>) source.remove("remote"); + if (remote == null) { + return null; + } + String username = extractString(remote, "username"); + String password = extractString(remote, "password"); + String hostInRequest = requireNonNull(extractString(remote, "host"), "[host] must be specified to reindex from a remote cluster"); + Matcher hostMatcher = HOST_PATTERN.matcher(hostInRequest); + if (false == hostMatcher.matches()) { + throw new IllegalArgumentException("[host] must be of the form [scheme]://[host]:[port] but was [" + hostInRequest + "]"); + } + String scheme = hostMatcher.group("scheme"); + String host = hostMatcher.group("host"); + int port = Integer.parseInt(hostMatcher.group("port")); + if (false == remote.isEmpty()) { + throw new IllegalArgumentException( + "Unsupported fields in [remote]: [" + Strings.collectionToCommaDelimitedString(remote.keySet()) + "]"); + } + return new RemoteInfo(scheme, host, port, queryForRemote(source), username, password); + } + /** * Yank a string array from a map. Emulates XContent's permissive String to * String array conversions.
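buildRemoteInfo above insists the host be given as [scheme]://[host]:[port] and pulls the pieces out with HOST_PATTERN's named capture groups. A self-contained sketch of just that parsing step (the sample host value is invented; the error message mirrors the one in the method):

// Standalone demonstration of the host-parsing regex used by buildRemoteInfo.
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class RemoteHostParseDemo {
    private static final Pattern HOST_PATTERN =
            Pattern.compile("(?<scheme>[^:]+)://(?<host>[^:]+):(?<port>\\d+)");

    public static void main(String[] args) {
        String hostInRequest = "http://otherhost:9200"; // hypothetical example value
        Matcher m = HOST_PATTERN.matcher(hostInRequest);
        if (false == m.matches()) {
            throw new IllegalArgumentException(
                "[host] must be of the form [scheme]://[host]:[port] but was [" + hostInRequest + "]");
        }
        // Named groups make the extraction self-documenting.
        System.out.println("scheme=" + m.group("scheme")
                + " host=" + m.group("host")
                + " port=" + Integer.parseInt(m.group("port")));
    }
}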
@@ -147,7 +178,32 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler source, String name) { + Object value = source.remove(name); + if (value == null) { + return null; + } + if (value instanceof String) { + return (String) value; + } + throw new IllegalArgumentException("Expected [" + name + "] to be a string but was [" + value + "]"); + } + + private static BytesReference queryForRemote(Map source) throws IOException { + XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint(); + Object query = source.remove("query"); + if (query == null) { + return matchAllQuery().toXContent(builder, ToXContent.EMPTY_PARAMS).bytes(); + } + if (!(query instanceof Map)) { + throw new IllegalArgumentException("Expected [query] to be an object but was [" + query + "]"); + } + @SuppressWarnings("unchecked") + Map map = (Map) query; + return builder.map(map).bytes(); + } + + static class ReindexParseContext implements ParseFieldMatcherSupplier { private final IndicesQueriesRegistry indicesQueryRegistry; private final ParseFieldMatcher parseFieldMatcher; private final AggregatorParsers aggParsers; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestRethrottleAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestRethrottleAction.java index 9841794ca2a..f35225452bd 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestRethrottleAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestRethrottleAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; -import org.elasticsearch.client.Client; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -36,14 +36,11 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.action.admin.cluster.node.tasks.RestListTasksAction.nodeSettingListener; public class RestRethrottleAction extends BaseRestHandler { - private final TransportRethrottleAction action; private final ClusterService clusterService; @Inject - public RestRethrottleAction(Settings settings, RestController controller, Client client, TransportRethrottleAction action, - ClusterService clusterService) { - super(settings, client); - this.action = action; + public RestRethrottleAction(Settings settings, RestController controller, ClusterService clusterService) { + super(settings); this.clusterService = clusterService; controller.registerHandler(POST, "/_update_by_query/{taskId}/_rethrottle", this); controller.registerHandler(POST, "/_delete_by_query/{taskId}/_rethrottle", this); @@ -51,7 +48,7 @@ public class RestRethrottleAction extends BaseRestHandler { } @Override - public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) { + public void handleRequest(final RestRequest request, final RestChannel channel, final NodeClient client) { RethrottleRequest internalRequest = new RethrottleRequest(); internalRequest.setTaskId(new TaskId(request.param("taskId"))); Float requestsPerSecond = AbstractBaseReindexRestHandler.parseRequestsPerSecond(request); @@ -60,6 +57,6 @@ public class RestRethrottleAction extends BaseRestHandler { } internalRequest.setRequestsPerSecond(requestsPerSecond); ActionListener listener 
= nodeSettingListener(clusterService, new RestToXContentListener<>(channel)); - action.execute(internalRequest, listener); + client.execute(RethrottleAction.INSTANCE, internalRequest, listener); } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java index 8eb0e30fe38..81bc0b5d27f 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java @@ -20,7 +20,7 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.client.Client; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -42,16 +42,16 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; public class RestUpdateByQueryAction extends AbstractBulkByQueryRestHandler { @Inject - public RestUpdateByQueryAction(Settings settings, RestController controller, Client client, + public RestUpdateByQueryAction(Settings settings, RestController controller, IndicesQueriesRegistry indicesQueriesRegistry, AggregatorParsers aggParsers, Suggesters suggesters, ClusterService clusterService, TransportUpdateByQueryAction action) { - super(settings, client, indicesQueriesRegistry, aggParsers, suggesters, clusterService, action); + super(settings, indicesQueriesRegistry, aggParsers, suggesters, clusterService, action); controller.registerHandler(POST, "/{index}/_update_by_query", this); controller.registerHandler(POST, "/{index}/{type}/_update_by_query", this); } @Override - protected void handleRequest(RestRequest request, RestChannel channel, Client client) throws Exception { + public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { handleRequest(request, channel, false, true); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java new file mode 100644 index 00000000000..b03496df7a7 --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java @@ -0,0 +1,357 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.bulk.BackoffPolicy; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.reindex.remote.RemoteScrollableHitSource; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.threadpool.ThreadPool; + +import java.io.Closeable; +import java.io.IOException; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; + +import static java.util.Objects.requireNonNull; + +/** + * A scrollable source of results. + */ +public abstract class ScrollableHitSource implements Closeable { + private final AtomicReference scrollId = new AtomicReference<>(); + + protected final ESLogger logger; + protected final BackoffPolicy backoffPolicy; + protected final ThreadPool threadPool; + protected final Runnable countSearchRetry; + protected final Consumer fail; + + public ScrollableHitSource(ESLogger logger, BackoffPolicy backoffPolicy, ThreadPool threadPool, Runnable countSearchRetry, + Consumer fail) { + this.logger = logger; + this.backoffPolicy = backoffPolicy; + this.threadPool = threadPool; + this.countSearchRetry = countSearchRetry; + this.fail = fail; + } + + public final void start(Consumer onResponse) { + doStart(response -> { + setScroll(response.getScrollId()); + logger.debug("scroll returned [{}] documents with a scroll id of [{}]", response.getHits().size(), response.getScrollId()); + onResponse.accept(response); + }); + } + protected abstract void doStart(Consumer onResponse); + + public final void startNextScroll(TimeValue extraKeepAlive, Consumer onResponse) { + doStartNextScroll(scrollId.get(), extraKeepAlive, response -> { + setScroll(response.getScrollId()); + onResponse.accept(response); + }); + } + protected abstract void doStartNextScroll(String scrollId, TimeValue extraKeepAlive, Consumer onResponse); + + @Override + public void close() { + String scrollId = this.scrollId.get(); + if (Strings.hasLength(scrollId)) { + clearScroll(scrollId); + } + } + protected abstract void clearScroll(String scrollId); + + /** + * Set the id of the last scroll. Used for debugging. + */ + final void setScroll(String scrollId) { + this.scrollId.set(scrollId); + } + + /** + * Response from each scroll batch. + */ + public static class Response { + private final boolean timedOut; + private final List failures; + private final long totalHits; + private final List hits; + private final String scrollId; + + public Response(boolean timedOut, List failures, long totalHits, List hits, String scrollId) { + this.timedOut = timedOut; + this.failures = failures; + this.totalHits = totalHits; + this.hits = hits; + this.scrollId = scrollId; + } + + /** + * Did this batch time out? + */ + public boolean isTimedOut() { + return timedOut; + } + + /** + * Where there any search failures? 
+ */ + public final List getFailures() { + return failures; + } + + /** + * What was the total number of documents matching the search? + */ + public long getTotalHits() { + return totalHits; + } + + /** + * The documents returned in this batch. + */ + public List getHits() { + return hits; + } + + /** + * The scroll id used to fetch the next set of documents. + */ + public String getScrollId() { + return scrollId; + } + } + + /** + * A document returned as part of the response. Think of it like {@link SearchHit} but with all the things reindex needs in convenient + * methods. + */ + public interface Hit { + String getIndex(); + String getType(); + String getId(); + long getVersion(); + /** + * The source of the hit. Returns null if the source didn't come back from the search, usually because the source wasn't stored at + * all. + */ + @Nullable BytesReference getSource(); + @Nullable String getParent(); + @Nullable String getRouting(); + @Nullable Long getTimestamp(); + @Nullable Long getTTL(); + } + + /** + * An implementation of {@linkplain Hit} that uses getters and setters. Primarily used for testing and {@link RemoteScrollableHitSource} + * . + */ + public static class BasicHit implements Hit { + private final String index; + private final String type; + private final String id; + private final long version; + + private BytesReference source; + private String parent; + private String routing; + private Long timestamp; + private Long ttl; + + public BasicHit(String index, String type, String id, long version) { + this.index = index; + this.type = type; + this.id = id; + this.version = version; + } + + @Override + public String getIndex() { + return index; + } + + @Override + public String getType() { + return type; + } + + @Override + public String getId() { + return id; + } + + @Override + public long getVersion() { + return version; + } + + @Override + public BytesReference getSource() { + return source; + } + + public BasicHit setSource(BytesReference source) { + this.source = source; + return this; + } + + @Override + public String getParent() { + return parent; + } + + public BasicHit setParent(String parent) { + this.parent = parent; + return this; + } + + @Override + public String getRouting() { + return routing; + } + + public BasicHit setRouting(String routing) { + this.routing = routing; + return this; + } + + @Override + public Long getTimestamp() { + return timestamp; + } + + public BasicHit setTimestamp(Long timestamp) { + this.timestamp = timestamp; + return this; + } + + @Override + public Long getTTL() { + return ttl; + } + + public BasicHit setTTL(Long ttl) { + this.ttl = ttl; + return this; + } + } + + /** + * A failure during search. Like {@link ShardSearchFailure} but useful for reindex from remote as well. + */ + public static class SearchFailure implements Writeable, ToXContent { + private final Throwable reason; + @Nullable + private final String index; + @Nullable + private final Integer shardId; + @Nullable + private final String nodeId; + + public SearchFailure(Throwable reason, @Nullable String index, @Nullable Integer shardId, @Nullable String nodeId) { + this.index = index; + this.shardId = shardId; + this.reason = requireNonNull(reason, "reason cannot be null"); + this.nodeId = nodeId; + } + + /** + * Build a search failure that doesn't have shard information available. + */ + public SearchFailure(Throwable reason) { + this(reason, null, null, null); + } + + /** + * Read from a stream.
+ */ + public SearchFailure(StreamInput in) throws IOException { + reason = in.readException(); + index = in.readOptionalString(); + shardId = in.readOptionalVInt(); + nodeId = in.readOptionalString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeException(reason); + out.writeOptionalString(index); + out.writeOptionalVInt(shardId); + out.writeOptionalString(nodeId); + } + + public String getIndex() { + return index; + } + + public Integer getShardId() { + return shardId; + } + + public Throwable getReason() { + return reason; + } + + @Nullable + public String getNodeId() { + return nodeId; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (index != null) { + builder.field("index", index); + } + if (shardId != null) { + builder.field("shard", shardId); + } + if (nodeId != null) { + builder.field("node", nodeId); + } + builder.field("reason"); + { + builder.startObject(); + ElasticsearchException.toXContent(builder, params, reason); + builder.endObject(); + } + builder.endObject(); + return builder; + } + + @Override + public String toString() { + return Strings.toString(this); + } + } +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java index 471bd066f94..c3847ab2125 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java @@ -31,10 +31,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.internal.ParentFieldMapper; -import org.elasticsearch.index.mapper.internal.RoutingFieldMapper; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.SearchHit; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -74,35 +71,35 @@ public class TransportDeleteByQueryAction extends HandledTransportAction listener, ScriptService scriptService, ClusterState clusterState) { - super(task, logger, client, threadPool, request, request.getSearchRequest(), listener, scriptService, clusterState); + super(task, logger, client, threadPool, request, listener, scriptService, clusterState); } @Override - protected boolean accept(SearchHit doc) { + protected boolean accept(ScrollableHitSource.Hit doc) { // Delete-by-query does not require the source to delete a document // and the default implementation checks for it return true; } @Override - protected RequestWrapper buildRequest(SearchHit doc) { + protected RequestWrapper buildRequest(ScrollableHitSource.Hit doc) { DeleteRequest delete = new DeleteRequest(); - delete.index(doc.index()); - delete.type(doc.type()); - delete.id(doc.id()); - delete.version(doc.version()); + delete.index(doc.getIndex()); + delete.type(doc.getType()); + delete.id(doc.getId()); + delete.version(doc.getVersion()); return wrap(delete); } /** - * Overrides the parent {@link AbstractAsyncBulkIndexByScrollAction#copyMetadata(RequestWrapper, SearchHit)} + * Overrides the parent {@link AbstractAsyncBulkIndexByScrollAction#copyMetadata(RequestWrapper, 
ScrollableHitSource.Hit)} * method that is much more Update/Reindex oriented and so also copies things like timestamp/ttl which we * don't care for a deletion. */ @Override - protected RequestWrapper copyMetadata(RequestWrapper request, SearchHit doc) { - copyParent(request, fieldValue(doc, ParentFieldMapper.NAME)); - copyRouting(request, fieldValue(doc, RoutingFieldMapper.NAME)); + protected RequestWrapper copyMetadata(RequestWrapper request, ScrollableHitSource.Hit doc) { + request.setParent(doc.getParent()); + request.setRouting(doc.getRouting()); return request; } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index a49ba0a3b32..04ccfa1ba49 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -19,8 +19,10 @@ package org.elasticsearch.index.reindex; +import org.apache.http.HttpHost; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ActionFilters; @@ -28,51 +30,72 @@ import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; import org.elasticsearch.client.ParentTaskAssigningClient; +import org.elasticsearch.client.RestClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.http.HttpInfo; import org.elasticsearch.index.mapper.internal.TTLFieldMapper; import org.elasticsearch.index.mapper.internal.VersionFieldMapper; +import org.elasticsearch.index.reindex.remote.RemoteInfo; +import org.elasticsearch.index.reindex.remote.RemoteScrollableHitSource; +import org.elasticsearch.node.service.NodeService; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.SearchHit; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.function.BiFunction; +import java.util.function.Function; +import static java.util.Collections.emptyList; import static java.util.Objects.requireNonNull; import static org.elasticsearch.index.VersionType.INTERNAL; public class TransportReindexAction extends HandledTransportAction { + public static final Setting> REMOTE_CLUSTER_WHITELIST = + Setting.listSetting("reindex.remote.whitelist", emptyList(), Function.identity(), Property.NodeScope); + private final ClusterService clusterService; private final ScriptService 
scriptService; private final AutoCreateIndex autoCreateIndex; private final Client client; + private final Set remoteWhitelist; + private final NodeService nodeService; @Inject public TransportReindexAction(Settings settings, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, ClusterService clusterService, ScriptService scriptService, - AutoCreateIndex autoCreateIndex, Client client, TransportService transportService) { + AutoCreateIndex autoCreateIndex, Client client, TransportService transportService, NodeService nodeService) { super(settings, ReindexAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, ReindexRequest::new); this.clusterService = clusterService; this.scriptService = scriptService; this.autoCreateIndex = autoCreateIndex; this.client = client; + remoteWhitelist = new HashSet<>(REMOTE_CLUSTER_WHITELIST.get(settings)); + this.nodeService = nodeService; } @Override protected void doExecute(Task task, ReindexRequest request, ActionListener listener) { + checkRemoteWhitelist(request.getRemoteInfo()); ClusterState state = clusterService.state(); - validateAgainstAliases(request.getSearchRequest(), request.getDestination(), indexNameExpressionResolver, autoCreateIndex, state); + validateAgainstAliases(request.getSearchRequest(), request.getDestination(), request.getRemoteInfo(), indexNameExpressionResolver, + autoCreateIndex, state); ParentTaskAssigningClient client = new ParentTaskAssigningClient(this.client, clusterService.localNode(), task); new AsyncIndexBySearchAction((BulkByScrollTask) task, logger, client, threadPool, request, listener, scriptService, state).start(); } @@ -82,15 +105,43 @@ public class TransportReindexAction extends HandledTransportAction whitelist, RemoteInfo remoteInfo, TransportAddress publishAddress) { + if (remoteInfo == null) return; + String check = remoteInfo.getHost() + ':' + remoteInfo.getPort(); + if (whitelist.contains(check)) return; + /* + * For testing we support the key "myself" to allow connecting to the local node. We can't just change the setting to include the + * local node because it is intentionally not a dynamic setting for security purposes. We can't use something like "localhost:9200" + * because we don't know up front which port we'll get because the tests bind to port 0. Instead we try to resolve it here, taking + * "myself" to mean "my published http address". + */ + if (whitelist.contains("myself") && publishAddress != null && publishAddress.toString().equals(check)) { + return; + } + throw new IllegalArgumentException('[' + check + "] not whitelisted in " + REMOTE_CLUSTER_WHITELIST.getKey()); + } + /** * Throws an ActionRequestValidationException if the request tries to index * back into the same index or into an index that points to two indexes. * This cannot be done during request validation because the cluster state * isn't available then. Package private for testing. 
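 * (Editorial example, not part of the patch: with an alias "all" spanning "idx-1" and "idx-2", reindexing from "idx-1" into "all" is rejected, both because the destination resolves to two indexes and because it can loop back onto the source; the names are invented. The remote case returns early below, since its source indexes live on another cluster.)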
*/ - static String validateAgainstAliases(SearchRequest source, IndexRequest destination, + static void validateAgainstAliases(SearchRequest source, IndexRequest destination, RemoteInfo remoteInfo, IndexNameExpressionResolver indexNameExpressionResolver, AutoCreateIndex autoCreateIndex, ClusterState clusterState) { + if (remoteInfo != null) { + return; + } String target = destination.index(); if (false == autoCreateIndex.shouldAutoCreate(target, clusterState)) { /* @@ -107,7 +158,6 @@ public class TransportReindexAction extends HandledTransportAction listener, ScriptService scriptService, ClusterState clusterState) { - super(task, logger, client, threadPool, request, request.getSearchRequest(), listener, scriptService, clusterState); + super(task, logger, client, threadPool, request, listener, scriptService, clusterState); } @Override - protected BiFunction, SearchHit, RequestWrapper> buildScriptApplier() { + protected ScrollableHitSource buildScrollableResultSource(BackoffPolicy backoffPolicy) { + if (mainRequest.getRemoteInfo() != null) { + // NORELEASE track 500-level retries that are builtin to the client + RemoteInfo remoteInfo = mainRequest.getRemoteInfo(); + if (remoteInfo.getUsername() != null) { + // NORELEASE support auth + throw new UnsupportedOperationException("Auth is unsupported"); + } + RestClient restClient = RestClient.builder(new HttpHost(remoteInfo.getHost(), remoteInfo.getPort(), remoteInfo.getScheme())) + .build(); + RemoteScrollableHitSource.AsyncClient client = new RemoteScrollableHitSource.AsynchronizingRestClient(threadPool, + restClient); + return new RemoteScrollableHitSource(logger, backoffPolicy, threadPool, task::countSearchRetry, this::finishHim, client, + remoteInfo.getQuery(), mainRequest.getSearchRequest()); + } + return super.buildScrollableResultSource(backoffPolicy); + } + + @Override + protected BiFunction, ScrollableHitSource.Hit, RequestWrapper> buildScriptApplier() { Script script = mainRequest.getScript(); if (script != null) { - return new ReindexScriptApplier(task, scriptService, script, clusterState, script.getParams()); + return new ReindexScriptApplier(task, scriptService, script, script.getParams()); } return super.buildScriptApplier(); } @Override - protected RequestWrapper buildRequest(SearchHit doc) { + protected RequestWrapper buildRequest(ScrollableHitSource.Hit doc) { IndexRequest index = new IndexRequest(); // Copy the index from the request so we always write where it asked to write @@ -142,7 +211,7 @@ public class TransportReindexAction extends HandledTransportAction params) { - super(task, scriptService, script, state, params); + super(task, scriptService, script, params); } /* diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportRethrottleAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportRethrottleAction.java index 2990461c0a9..0ef55fd58ff 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportRethrottleAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportRethrottleAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.tasks.TransportTasksAction; -import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import 
org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java index 7459972ce64..3765c77927c 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java @@ -41,7 +41,6 @@ import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.SearchHit; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -85,36 +84,36 @@ public class TransportUpdateByQueryAction extends HandledTransportAction listener, ScriptService scriptService, ClusterState clusterState) { - super(task, logger, client, threadPool, request, request.getSearchRequest(), listener, scriptService, clusterState); + super(task, logger, client, threadPool, request, listener, scriptService, clusterState); } @Override - protected BiFunction, SearchHit, RequestWrapper> buildScriptApplier() { + protected BiFunction, ScrollableHitSource.Hit, RequestWrapper> buildScriptApplier() { Script script = mainRequest.getScript(); if (script != null) { - return new UpdateByQueryScriptApplier(task, scriptService, script, clusterState, script.getParams()); + return new UpdateByQueryScriptApplier(task, scriptService, script, script.getParams()); } return super.buildScriptApplier(); } @Override - protected RequestWrapper buildRequest(SearchHit doc) { + protected RequestWrapper buildRequest(ScrollableHitSource.Hit doc) { IndexRequest index = new IndexRequest(); - index.index(doc.index()); - index.type(doc.type()); - index.id(doc.id()); - index.source(doc.sourceRef()); + index.index(doc.getIndex()); + index.type(doc.getType()); + index.id(doc.getId()); + index.source(doc.getSource()); index.versionType(VersionType.INTERNAL); - index.version(doc.version()); + index.version(doc.getVersion()); index.setPipeline(mainRequest.getPipeline()); return wrap(index); } class UpdateByQueryScriptApplier extends ScriptApplier { - UpdateByQueryScriptApplier(BulkByScrollTask task, ScriptService scriptService, Script script, ClusterState state, + UpdateByQueryScriptApplier(BulkByScrollTask task, ScriptService scriptService, Script script, Map params) { - super(task, scriptService, script, state, params); + super(task, scriptService, script, params); } @Override diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteInfo.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteInfo.java new file mode 100644 index 00000000000..89d6cb18401 --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteInfo.java @@ -0,0 +1,113 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.reindex.remote; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; + +import static java.util.Objects.requireNonNull; + +public class RemoteInfo implements Writeable { + private final String scheme; + private final String host; + private final int port; + private final BytesReference query; + private final String username; + private final String password; + + public RemoteInfo(String scheme, String host, int port, BytesReference query, String username, String password) { + this.scheme = requireNonNull(scheme, "[scheme] must be specified to reindex from a remote cluster"); + this.host = requireNonNull(host, "[host] must be specified to reindex from a remote cluster"); + this.port = port; + this.query = requireNonNull(query, "[query] must be specified to reindex from a remote cluster"); + this.username = username; + this.password = password; + } + + /** + * Read from a stream. + */ + public RemoteInfo(StreamInput in) throws IOException { + scheme = in.readString(); + host = in.readString(); + port = in.readVInt(); + query = in.readBytesReference(); + username = in.readOptionalString(); + password = in.readOptionalString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(scheme); + out.writeString(host); + out.writeVInt(port); + out.writeBytesReference(query); + out.writeOptionalString(username); + out.writeOptionalString(password); + } + + public String getScheme() { + return scheme; + } + + public String getHost() { + return host; + } + + public int getPort() { + return port; + } + + public BytesReference getQuery() { + return query; + } + + @Nullable + public String getUsername() { + return username; + } + + @Nullable + public String getPassword() { + return password; + } + + @Override + public String toString() { + StringBuilder b = new StringBuilder(); + if (false == "http".equals(scheme)) { + // http is the default so it isn't worth taking up space if it is the scheme + b.append("scheme=").append(scheme).append(' '); + } + b.append("host=").append(host).append(" port=").append(port).append(" query=").append(query.utf8ToString()); + if (username != null) { + b.append(" username=").append(username); + } + if (password != null) { + b.append(" password=<<>>"); + } + return b.toString(); + } +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java new file mode 100644 index 00000000000..00c9f0ae509 --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java @@ -0,0 +1,163 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.reindex.remote; + +import org.apache.http.HttpEntity; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.search.sort.SortBuilder; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static java.util.Collections.singletonMap; + +final class RemoteRequestBuilders { + private RemoteRequestBuilders() {} + + static String initialSearchPath(SearchRequest searchRequest) { + // It is nasty to build paths with StringBuilder but we'll be careful.... + StringBuilder path = new StringBuilder("/"); + addIndexesOrTypes(path, "Index", searchRequest.indices()); + addIndexesOrTypes(path, "Type", searchRequest.types()); + path.append("_search"); + return path.toString(); + } + + static Map initialSearchParams(SearchRequest searchRequest, Version remoteVersion) { + Map params = new HashMap<>(); + if (searchRequest.scroll() != null) { + params.put("scroll", searchRequest.scroll().keepAlive().toString()); + } + params.put("size", Integer.toString(searchRequest.source().size())); + if (searchRequest.source().version() == null || searchRequest.source().version() == true) { + // false is the only value that makes it false. Null defaults to true.... 
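+ // (Editorial aside, not part of the patch: together with initialSearchPath above, the params built here + // produce a request line along the lines of POST /src/doc/_search?scroll=5m&size=100&sort=timestamp:asc + // for a typical reindex-from-remote; "src", "doc", "timestamp", and the values are invented for illustration.)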
+ params.put("version", null); + } + if (searchRequest.source().sorts() != null) { + boolean useScan = false; + // Detect if we should use search_type=scan rather than a sort + if (remoteVersion.before(Version.V_2_1_0)) { + for (SortBuilder sort : searchRequest.source().sorts()) { + if (sort instanceof FieldSortBuilder) { + FieldSortBuilder f = (FieldSortBuilder) sort; + if (f.getFieldName().equals(FieldSortBuilder.DOC_FIELD_NAME)) { + useScan = true; + break; + } + } + } + } + if (useScan) { + params.put("search_type", "scan"); + } else { + StringBuilder sorts = new StringBuilder(sortToUri(searchRequest.source().sorts().get(0))); + for (int i = 1; i < searchRequest.source().sorts().size(); i++) { + sorts.append(',').append(sortToUri(searchRequest.source().sorts().get(i))); + } + params.put("sorts", sorts.toString()); + } + } + if (searchRequest.source().storedFields() != null && false == searchRequest.source().storedFields().isEmpty()) { + StringBuilder fields = new StringBuilder(searchRequest.source().storedFields().get(0)); + for (int i = 1; i < searchRequest.source().storedFields().size(); i++) { + fields.append(',').append(searchRequest.source().storedFields().get(i)); + } + String storedFieldsParamName = remoteVersion.before(Version.V_5_0_0_alpha4) ? "fields" : "stored_fields"; + params.put(storedFieldsParamName, fields.toString()); + } + return params; + } + + static HttpEntity initialSearchEntity(BytesReference query) { + try (XContentBuilder entity = JsonXContent.contentBuilder(); XContentParser queryParser = XContentHelper.createParser(query)) { + entity.startObject(); + entity.field("query"); + /* + * We're intentionally a bit paranoid here - copying the query as xcontent rather than writing a raw field. We don't want poorly + * written queries to escape. Ever. + */ + entity.copyCurrentStructure(queryParser); + XContentParser.Token shouldBeEof = queryParser.nextToken(); + if (shouldBeEof != null) { + throw new ElasticsearchException( + "query was more than a single object. 
This first token after the object is [" + shouldBeEof + "]"); + } + entity.endObject(); + BytesRef bytes = entity.bytes().toBytesRef(); + return new ByteArrayEntity(bytes.bytes, bytes.offset, bytes.length, ContentType.APPLICATION_JSON); + } catch (IOException e) { + throw new ElasticsearchException("unexpected error building entity", e); + } + } + + private static void addIndexesOrTypes(StringBuilder path, String name, String[] indicesOrTypes) { + if (indicesOrTypes == null || indicesOrTypes.length == 0) { + return; + } + for (String indexOrType : indicesOrTypes) { + checkIndexOrType(name, indexOrType); + } + path.append(Strings.arrayToCommaDelimitedString(indicesOrTypes)).append('/'); + } + + private static void checkIndexOrType(String name, String indexOrType) { + if (indexOrType.indexOf(',') >= 0) { + throw new IllegalArgumentException(name + " containing [,] not supported but got [" + indexOrType + "]"); + } + if (indexOrType.indexOf('/') >= 0) { + throw new IllegalArgumentException(name + " containing [/] not supported but got [" + indexOrType + "]"); + } + } + + private static String sortToUri(SortBuilder sort) { + if (sort instanceof FieldSortBuilder) { + FieldSortBuilder f = (FieldSortBuilder) sort; + return f.getFieldName() + ":" + f.order(); + } + throw new IllegalArgumentException("Unsupported sort [" + sort + "]"); + } + + static String scrollPath() { + return "/_search/scroll"; + } + + static Map scrollParams(TimeValue keepAlive) { + return singletonMap("scroll", keepAlive.toString()); + } + + static HttpEntity scrollEntity(String scroll) { + return new StringEntity(scroll, ContentType.TEXT_PLAIN); + } +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java new file mode 100644 index 00000000000..0a467593a2c --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java @@ -0,0 +1,301 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex.remote; + +import org.elasticsearch.Version; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParseFieldMatcherSupplier; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentLocation; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.index.reindex.ScrollableHitSource.BasicHit; +import org.elasticsearch.index.reindex.ScrollableHitSource.Hit; +import org.elasticsearch.index.reindex.ScrollableHitSource.Response; +import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure; + +import java.io.IOException; +import java.util.List; +import java.util.function.BiFunction; + +import static java.util.Collections.emptyList; +import static java.util.Collections.singletonList; +import static java.util.Objects.requireNonNull; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +/** + * Parsers to convert the response from the remote host into objects useful for {@link RemoteScrollableHitSource}. Lots of data is + * intentionally thrown on the floor because we don't need it but ObjectParser and friends are strict about blowing up when they see + * elements they don't understand. So you'll see a lot of BiConsumers that look like "(b, v) -> {}". That means "I don't care about the + * value here, just throw it away and don't blow up." + */ +final class RemoteResponseParsers { + private RemoteResponseParsers() {} + + /** + * Parser for an individual {@code hit} element. + */ + public static final ConstructingObjectParser HIT_PARSER = new ConstructingObjectParser<>("hit", + a -> { + int i = 0; + String index = (String) a[i++]; + String type = (String) a[i++]; + String id = (String) a[i++]; + long version = (long) a[i++]; + return new BasicHit(index, type, id, version); + }); + static { + HIT_PARSER.declareString(constructorArg(), new ParseField("_index")); + HIT_PARSER.declareString(constructorArg(), new ParseField("_type")); + HIT_PARSER.declareString(constructorArg(), new ParseField("_id")); + HIT_PARSER.declareLong(constructorArg(), new ParseField("_version")); + HIT_PARSER.declareObject(BasicHit::setSource, (p, s) -> { + try { + /* + * We spool the data from the remote back into xcontent so we can get bytes to send. There ought to be a better way but for + * now this should do.
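+ * (Editorial aside: the effect is a byte-for-byte re-encode of the _source; a hypothetical hit carrying {"foo": "bar"} ends up with exactly those JSON bytes set on the BasicHit.)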
+ */ + try (XContentBuilder b = JsonXContent.contentBuilder()) { + b.copyCurrentStructure(p); + return b.bytes(); + } + } catch (IOException e) { + throw new ParsingException(p.getTokenLocation(), "[hit] failed to parse [_source]", e); + } + }, new ParseField("_source")); + HIT_PARSER.declareString(BasicHit::setRouting, new ParseField("_routing")); + HIT_PARSER.declareString(BasicHit::setParent, new ParseField("_parent")); + HIT_PARSER.declareLong(BasicHit::setTTL, new ParseField("_ttl")); + HIT_PARSER.declareLong(BasicHit::setTimestamp, new ParseField("_timestamp")); + HIT_PARSER.declareField((b, v) -> {}, p -> null, new ParseField("_score"), ValueType.FLOAT_OR_NULL); + HIT_PARSER.declareStringArray((b, v) -> {}, new ParseField("sort")); + } + + /** + * Parser for the {@code hits} element. Parsed to an array of {@code [total (Long), hits (List)]}. + */ + public static final ConstructingObjectParser HITS_PARSER = new ConstructingObjectParser<>("hits", + a -> a); + static { + HITS_PARSER.declareLong(constructorArg(), new ParseField("total")); + HITS_PARSER.declareObjectArray(constructorArg(), HIT_PARSER, new ParseField("hits")); + HITS_PARSER.declareField((b, v) -> {}, p -> null, new ParseField("max_score"), ValueType.FLOAT_OR_NULL); + } + + /** + * Parser for {@code failed} shards in the {@code _shards} elements. + */ + public static final ConstructingObjectParser SEARCH_FAILURE_PARSER = + new ConstructingObjectParser<>("failure", a -> { + int i = 0; + String index = (String) a[i++]; + Integer shardId = (Integer) a[i++]; + String nodeId = (String) a[i++]; + Object reason = a[i++]; + + Throwable reasonThrowable; + if (reason instanceof String) { + reasonThrowable = new RuntimeException("Unknown remote exception with reason=[" + (String) reason + "]"); + } else { + reasonThrowable = (Throwable) reason; + } + return new SearchFailure(reasonThrowable, index, shardId, nodeId); + }); + static { + SEARCH_FAILURE_PARSER.declareString(optionalConstructorArg(), new ParseField("index")); + SEARCH_FAILURE_PARSER.declareInt(optionalConstructorArg(), new ParseField("shard")); + SEARCH_FAILURE_PARSER.declareString(optionalConstructorArg(), new ParseField("node")); + SEARCH_FAILURE_PARSER.declareField(constructorArg(), (p, c) -> { + if (p.currentToken() == XContentParser.Token.START_OBJECT) { + return ThrowableBuilder.PARSER.apply(p, c); + } else { + return p.text(); + } + }, new ParseField("reason"), ValueType.OBJECT_OR_STRING); + SEARCH_FAILURE_PARSER.declareInt((b, v) -> {}, new ParseField("status")); + } + + /** + * Parser for the {@code _shards} element. Throws everything out except the errors array if there is one. If there isn't one then it + * parses to an empty list. + */ + public static final ConstructingObjectParser, ParseFieldMatcherSupplier> SHARDS_PARSER = + new ConstructingObjectParser<>("_shards", a -> { + @SuppressWarnings("unchecked") + List failures = (List) a[0]; + failures = failures == null ? 
emptyList() : failures; + return failures; + }); + static { + SHARDS_PARSER.declareObjectArray(optionalConstructorArg(), SEARCH_FAILURE_PARSER, new ParseField("failures")); + SHARDS_PARSER.declareInt((b, v) -> {}, new ParseField("total")); + SHARDS_PARSER.declareInt((b, v) -> {}, new ParseField("successful")); + SHARDS_PARSER.declareInt((b, v) -> {}, new ParseField("failed")); + } + + public static final ConstructingObjectParser RESPONSE_PARSER = + new ConstructingObjectParser<>("search_response", a -> { + int i = 0; + Throwable catastrophicFailure = (Throwable) a[i++]; + if (catastrophicFailure != null) { + return new Response(false, singletonList(new SearchFailure(catastrophicFailure)), 0, emptyList(), null); + } + boolean timedOut = (boolean) a[i++]; + String scroll = (String) a[i++]; + Object[] hitsElement = (Object[]) a[i++]; + @SuppressWarnings("unchecked") + List failures = (List) a[i++]; + + long totalHits = 0; + List hits = emptyList(); + + // Pull apart the hits element if we got it + if (hitsElement != null) { + i = 0; + totalHits = (long) hitsElement[i++]; + @SuppressWarnings("unchecked") + List h = (List) hitsElement[i++]; + hits = h; + } + + return new Response(timedOut, failures, totalHits, hits, scroll); + }); + static { + RESPONSE_PARSER.declareObject(optionalConstructorArg(), ThrowableBuilder.PARSER, new ParseField("error")); + RESPONSE_PARSER.declareBoolean(optionalConstructorArg(), new ParseField("timed_out")); + RESPONSE_PARSER.declareString(optionalConstructorArg(), new ParseField("_scroll_id")); + RESPONSE_PARSER.declareObject(optionalConstructorArg(), HITS_PARSER, new ParseField("hits")); + RESPONSE_PARSER.declareObject(optionalConstructorArg(), SHARDS_PARSER, new ParseField("_shards")); + RESPONSE_PARSER.declareInt((b, v) -> {}, new ParseField("took")); + RESPONSE_PARSER.declareBoolean((b, v) -> {}, new ParseField("terminated_early")); + RESPONSE_PARSER.declareInt((b, v) -> {}, new ParseField("status")); + } + + /** + * Collects stuff about Throwables and attempts to rebuild them. 
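+ * (Editorial example: a reason element such as {"type": "parsing_exception", "reason": "[match] query malformed", "line": 1, "col": 10} is rebuilt by the builder below as a ParsingException at XContentLocation 1:10, with any caused_by chained in via initCause; the payload shown is invented.)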
+ */ + public static class ThrowableBuilder { + public static final BiFunction PARSER; + static { + ObjectParser parser = new ObjectParser<>("reason", ThrowableBuilder::new); + PARSER = parser.andThen(ThrowableBuilder::build); + parser.declareString(ThrowableBuilder::setType, new ParseField("type")); + parser.declareString(ThrowableBuilder::setReason, new ParseField("reason")); + parser.declareObject(ThrowableBuilder::setCausedBy, PARSER, new ParseField("caused_by")); + + // So we can give a nice error for parsing exceptions + parser.declareInt(ThrowableBuilder::setLine, new ParseField("line")); + parser.declareInt(ThrowableBuilder::setColumn, new ParseField("col")); + + // So we don't blow up on search exceptions + parser.declareString((b, v) -> {}, new ParseField("phase")); + parser.declareBoolean((b, v) -> {}, new ParseField("grouped")); + parser.declareField((p, v, c) -> p.skipChildren(), new ParseField("failed_shards"), ValueType.OBJECT_ARRAY); + + // Just throw away the root_cause + parser.declareField((p, v, c) -> p.skipChildren(), new ParseField("root_cause"), ValueType.OBJECT_ARRAY); + } + + private String type; + private String reason; + private Integer line; + private Integer column; + private Throwable causedBy; + + public Throwable build() { + Throwable t = buildWithoutCause(); + if (causedBy != null) { + t.initCause(causedBy); + } + return t; + } + + private Throwable buildWithoutCause() { + requireNonNull(type, "[type] is required"); + requireNonNull(reason, "[reason] is required"); + switch (type) { + // Make some effort to use the right exceptions + case "es_rejected_execution_exception": + return new EsRejectedExecutionException(reason); + case "parsing_exception": + XContentLocation location = null; + if (line != null && column != null) { + location = new XContentLocation(line, column); + } + return new ParsingException(location, reason); + // But it isn't worth trying to get it perfect.... + default: + return new RuntimeException(type + ": " + reason); + } + } + + public void setType(String type) { + this.type = type; + } + public void setReason(String reason) { + this.reason = reason; + } + public void setLine(Integer line) { + this.line = line; + } + public void setColumn(Integer column) { + this.column = column; + } + public void setCausedBy(Throwable causedBy) { + this.causedBy = causedBy; + } + } + + /** + * Parses the {@code version} field of the main action. There are a surprising number of fields in this that we don't need! + */ + public static final ConstructingObjectParser VERSION_PARSER = new ConstructingObjectParser<>( + "version", a -> Version.fromString((String) a[0])); + static { + VERSION_PARSER.declareString(constructorArg(), new ParseField("number")); + VERSION_PARSER.declareBoolean((p, v) -> {}, new ParseField("snapshot_build")); + VERSION_PARSER.declareBoolean((p, v) -> {}, new ParseField("build_snapshot")); + VERSION_PARSER.declareString((p, v) -> {}, new ParseField("build_hash")); + VERSION_PARSER.declareString((p, v) -> {}, new ParseField("build_date")); + VERSION_PARSER.declareString((p, v) -> {}, new ParseField("build_timestamp")); + VERSION_PARSER.declareString((p, v) -> {}, new ParseField("lucene_version")); + } + + /** + * Parses the main action to return just the {@linkplain Version} that it returns. We throw everything else out. 
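+ * (Editorial example: a hypothetical GET / body like {"name": "n1", "cluster_name": "c1", "version": {"number": "2.4.0"}, "tagline": "You Know, for Search"} parses to Version.fromString("2.4.0"); the other fields are declared only so strict parsing does not trip over them.)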
*/ + public static final ConstructingObjectParser MAIN_ACTION_PARSER = new ConstructingObjectParser<>( + "/", a -> (Version) a[0]); + static { + MAIN_ACTION_PARSER.declareBoolean((p, v) -> {}, new ParseField("ok")); + MAIN_ACTION_PARSER.declareInt((p, v) -> {}, new ParseField("status")); + MAIN_ACTION_PARSER.declareString((p, v) -> {}, new ParseField("name")); + MAIN_ACTION_PARSER.declareString((p, v) -> {}, new ParseField("cluster_name")); + MAIN_ACTION_PARSER.declareString((p, v) -> {}, new ParseField("tagline")); + MAIN_ACTION_PARSER.declareObject(constructorArg(), VERSION_PARSER, new ParseField("version")); + } +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java new file mode 100644 index 00000000000..62dbd59f80a --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java @@ -0,0 +1,242 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.index.reindex.remote; + +import org.apache.http.HttpEntity; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.bulk.BackoffPolicy; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.ParseFieldMatcherSupplier; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.xcontent.XContent; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.reindex.ScrollableHitSource; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.threadpool.ThreadPool; + +import java.io.BufferedInputStream; +import java.io.Closeable; +import java.io.IOException; +import java.io.InputStream; +import java.util.Iterator; +import java.util.Map; +import java.util.function.BiFunction; +import java.util.function.Consumer; + +import static java.util.Collections.emptyMap; +import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; +import static org.elasticsearch.common.unit.TimeValue.timeValueNanos; +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.initialSearchEntity; +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.initialSearchParams; +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.initialSearchPath; +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.scrollEntity; +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.scrollParams; +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.scrollPath; +import static org.elasticsearch.index.reindex.remote.RemoteResponseParsers.MAIN_ACTION_PARSER; +import static org.elasticsearch.index.reindex.remote.RemoteResponseParsers.RESPONSE_PARSER; + +public class RemoteScrollableHitSource extends ScrollableHitSource { + private final AsyncClient client; + private final BytesReference query; + private final SearchRequest searchRequest; + Version remoteVersion; + + public RemoteScrollableHitSource(ESLogger logger, BackoffPolicy backoffPolicy, ThreadPool threadPool, Runnable countSearchRetry, + Consumer fail, AsyncClient client, BytesReference query, SearchRequest searchRequest) { + super(logger, backoffPolicy, threadPool, countSearchRetry, fail); + this.query = query; + this.searchRequest = searchRequest; + this.client = client; + } + + @Override + public void close() { + try { + client.close(); + } catch (IOException e) { + fail.accept(new IOException("couldn't close the remote connection", e)); + } + } + + @Override + protected void doStart(Consumer onResponse) { + lookupRemoteVersion(version -> { + remoteVersion = version; + execute("POST", initialSearchPath(searchRequest), initialSearchParams(searchRequest, version), + initialSearchEntity(query), RESPONSE_PARSER, r -> onStartResponse(onResponse, r)); + }); + } + + void lookupRemoteVersion(Consumer onVersion) { + execute("GET", "", emptyMap(), null, MAIN_ACTION_PARSER, onVersion); + + } + + void 
onStartResponse(Consumer onResponse, Response response) { + if (Strings.hasLength(response.getScrollId()) && response.getHits().isEmpty()) { + logger.debug("First response looks like a scan response. Jumping right to the second. scroll=[{}]", response.getScrollId()); + doStartNextScroll(response.getScrollId(), timeValueMillis(0), onResponse); + } else { + onResponse.accept(response); + } + } + + @Override + protected void doStartNextScroll(String scrollId, TimeValue extraKeepAlive, Consumer onResponse) { + execute("POST", scrollPath(), scrollParams(timeValueNanos(searchRequest.scroll().keepAlive().nanos() + extraKeepAlive.nanos())), + scrollEntity(scrollId), RESPONSE_PARSER, onResponse); + } + + @Override + protected void clearScroll(String scrollId) { + // Need to throw out response.... + client.performRequest("DELETE", scrollPath(), emptyMap(), scrollEntity(scrollId), new ResponseListener() { + @Override + public void onResponse(InputStream response) { + logger.debug("Successfully cleared [{}]", scrollId); + } + + @Override + public void onRetryableFailure(Exception t) { + onFailure(t); + } + + @Override + public void onFailure(Exception t) { + logger.warn("Failed to clear scroll [{}]", t, scrollId); + } + }); + } + + void execute(String method, String uri, Map params, HttpEntity entity, + BiFunction parser, Consumer listener) { + class RetryHelper extends AbstractRunnable { + private final Iterator retries = backoffPolicy.iterator(); + + @Override + protected void doRun() throws Exception { + client.performRequest(method, uri, params, entity, new ResponseListener() { + @Override + public void onResponse(InputStream content) { + T response; + try { + XContent xContent = XContentFactory.xContentType(content).xContent(); + try (XContentParser xContentParser = xContent.createParser(content)) { + response = parser.apply(xContentParser, () -> ParseFieldMatcher.STRICT); + } + } catch (IOException e) { + throw new ElasticsearchException("Error deserializing response", e); + } + listener.accept(response); + } + + @Override + public void onFailure(Exception e) { + fail.accept(e); + } + + @Override + public void onRetryableFailure(Exception t) { + if (retries.hasNext()) { + TimeValue delay = retries.next(); + logger.trace("retrying rejected search after [{}]", t, delay); + countSearchRetry.run(); + threadPool.schedule(delay, ThreadPool.Names.SAME, RetryHelper.this); + } else { + fail.accept(t); + } + } + }); + } + + @Override + public void onFailure(Exception t) { + fail.accept(t); + } + } + new RetryHelper().run(); + } + + public interface AsyncClient extends Closeable { + void performRequest(String method, String uri, Map params, HttpEntity entity, ResponseListener listener); + } + + public interface ResponseListener extends ActionListener { + void onRetryableFailure(Exception t); + } + + public static class AsynchronizingRestClient implements AsyncClient { + private final ThreadPool threadPool; + private final RestClient restClient; + + public AsynchronizingRestClient(ThreadPool threadPool, RestClient restClient) { + this.threadPool = threadPool; + this.restClient = restClient; + } + + @Override + public void performRequest(String method, String uri, Map params, HttpEntity entity, + ResponseListener listener) { + /* + * We use the generic thread pool here because this client is blocking and the generic thread pool is sized appropriately for some + * of the threads on it to be blocked, waiting on IO.
It'd be a disaster if this ran on the listener thread pool, eating + * valuable threads needed to handle responses. Most other thread pool would probably not mind running this either, but the + * generic thread pool is the "most right" place for it to run. We could make our own thread pool for this but the generic + * thread pool already has plenty of capacity. + */ + threadPool.generic().execute(new AbstractRunnable() { + @Override + protected void doRun() throws Exception { + try (org.elasticsearch.client.Response response = restClient.performRequest(method, uri, params, entity)) { + InputStream markSupportedInputStream = new BufferedInputStream(response.getEntity().getContent()); + listener.onResponse(markSupportedInputStream); + } + } + + @Override + public void onFailure(Exception t) { + if (t instanceof ResponseException) { + ResponseException re = (ResponseException) t; + if (RestStatus.TOO_MANY_REQUESTS.getStatus() == re.getResponse().getStatusLine().getStatusCode()) { + listener.onRetryableFailure(t); + return; + } + } + listener.onFailure(t); + } + }); + } + + @Override + public void close() throws IOException { + restClient.close(); + } + } +} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionScriptTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionScriptTestCase.java index 0318e4ddb01..f8351b262fc 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionScriptTestCase.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionScriptTestCase.java @@ -22,21 +22,15 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.common.text.Text; -import org.elasticsearch.index.Index; import org.elasticsearch.index.reindex.AbstractAsyncBulkIndexByScrollAction.OpType; import org.elasticsearch.index.reindex.AbstractAsyncBulkIndexByScrollAction.RequestWrapper; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.SearchHitField; -import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.search.internal.InternalSearchHit; import org.junit.Before; import org.mockito.Matchers; -import java.util.HashMap; import java.util.Map; import java.util.function.Consumer; @@ -63,9 +57,7 @@ public abstract class AbstractAsyncBulkIndexByScrollActionScriptTestCase< @SuppressWarnings("unchecked") protected > T applyScript(Consumer> scriptBody) { IndexRequest index = new IndexRequest("index", "type", "1").source(singletonMap("foo", "bar")); - Map fields = new HashMap<>(); - InternalSearchHit doc = new InternalSearchHit(0, "id", new Text("type"), fields); - doc.shardTarget(new SearchShardTarget("nodeid", new Index("index", "uuid"), 1)); + ScrollableHitSource.Hit doc = new ScrollableHitSource.BasicHit("test", "type", "id", 0); ExecutableScript executableScript = new SimpleExecutableScript(scriptBody); when(scriptService.executable(any(CompiledScript.class), Matchers.>any())) diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java 
b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java index 5a9976fc005..4cc10334223 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java @@ -20,16 +20,7 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.common.text.Text; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.mapper.internal.TTLFieldMapper; -import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; -import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.search.internal.InternalSearchHit; -import org.elasticsearch.search.internal.InternalSearchHitField; -import static java.util.Collections.singletonList; -import static java.util.Collections.singletonMap; import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; public abstract class AbstractAsyncBulkIndexbyScrollActionMetadataTestCase< @@ -37,25 +28,19 @@ public abstract class AbstractAsyncBulkIndexbyScrollActionMetadataTestCase< Response extends BulkIndexByScrollResponse> extends AbstractAsyncBulkIndexByScrollActionTestCase { - /** - * Create a doc with some metadata. - */ - protected InternalSearchHit doc(String field, Object value) { - InternalSearchHit doc = new InternalSearchHit(0, "id", new Text("type"), singletonMap(field, - new InternalSearchHitField(field, singletonList(value)))); - doc.shardTarget(new SearchShardTarget("node", new Index("index", "uuid"), 0)); - return doc; + protected ScrollableHitSource.BasicHit doc() { + return new ScrollableHitSource.BasicHit("index", "type", "id", 0); } public void testTimestampIsCopied() { IndexRequest index = new IndexRequest(); - action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(TimestampFieldMapper.NAME, 10L)); + action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc().setTimestamp(10L)); assertEquals("10", index.timestamp()); } public void testTTL() throws Exception { IndexRequest index = new IndexRequest(); - action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(TTLFieldMapper.NAME, 10L)); + action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc().setTTL(10L)); assertEquals(timeValueMillis(10), index.ttl()); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java index 14577afc140..e45e2bc53d5 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -62,9 +62,10 @@ import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.index.Index; import org.elasticsearch.index.engine.VersionConflictEngineException; +import org.elasticsearch.index.reindex.ScrollableHitSource.Hit; +import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.search.SearchHit; import 
org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.InternalSearchHits; import org.elasticsearch.search.internal.InternalSearchResponse; @@ -95,6 +96,7 @@ import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static java.util.Collections.singleton; +import static java.util.Collections.singletonList; import static org.apache.lucene.util.TestUtil.randomSimpleString; import static org.elasticsearch.action.bulk.BackoffPolicy.constantBackoff; import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; @@ -103,7 +105,7 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; -import static org.hamcrest.Matchers.emptyCollectionOf; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; @@ -155,7 +157,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { * random scroll id so it is checked instead. */ private String scrollId() { - scrollId = randomSimpleString(random(), 1, 1000); // Empty strings get special behavior we don't want + scrollId = randomSimpleString(random(), 1, 10); // Empty strings get special behavior we don't want return scrollId; } @@ -216,10 +218,8 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { assertEquals(0, testTask.getStatus().getTotal()); long total = randomIntBetween(0, Integer.MAX_VALUE); - InternalSearchHits hits = new InternalSearchHits(null, total, 0); - InternalSearchResponse searchResponse = new InternalSearchResponse(hits, null, null, null, false, false); - new DummyAbstractAsyncBulkByScrollAction().onScrollResponse(timeValueSeconds(0), 0, - new SearchResponse(searchResponse, scrollId(), 5, 4, randomLong(), null)); + ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), total, emptyList(), null); + simulateScrollResponse(new DummyAbstractAsyncBulkByScrollAction(), timeValueSeconds(0), 0, response); assertEquals(total, testTask.getStatus().getTotal()); } @@ -229,12 +229,10 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { public void testScrollResponseBatchingBehavior() throws Exception { int maxBatches = randomIntBetween(0, 100); for (int batches = 1; batches < maxBatches; batches++) { - InternalSearchHit hit = new InternalSearchHit(0, "id", new Text("type"), emptyMap()); - InternalSearchHits hits = new InternalSearchHits(new InternalSearchHit[] { hit }, 0, 0); - InternalSearchResponse searchResponse = new InternalSearchResponse(hits, null, null, null, false, false); + Hit hit = new ScrollableHitSource.BasicHit("index", "type", "id", 0); + ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), 1, singletonList(hit), null); DummyAbstractAsyncBulkByScrollAction action = new DummyAbstractAsyncBulkByScrollAction(); - action.onScrollResponse(timeValueNanos(System.nanoTime()), 0, - new SearchResponse(searchResponse, scrollId(), 5, 4, randomLong(), null)); + simulateScrollResponse(action, timeValueNanos(System.nanoTime()), 0, response); // Use assert busy because the update happens on another thread final int expectedBatches = batches; @@ -318,16 +316,10 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { return 
null; } }; - InternalSearchHits hits = new InternalSearchHits(null, 0, 0); - InternalSearchResponse searchResponse = new InternalSearchResponse(hits, null, null, null, false, false); - new DummyAbstractAsyncBulkByScrollAction().onScrollResponse(timeValueNanos(System.nanoTime()), 10, - new SearchResponse(searchResponse, scrollId(), 5, 4, randomLong(), null)); - try { - listener.get(); - fail("Expected a failure"); - } catch (ExecutionException e) { - assertThat(e.getMessage(), equalTo("EsRejectedExecutionException[test]")); - } + ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), 0, emptyList(), null); + simulateScrollResponse(new DummyAbstractAsyncBulkByScrollAction(), timeValueNanos(System.nanoTime()), 10, response); + ExecutionException e = expectThrows(ExecutionException.class, () -> listener.get()); + assertThat(e.getMessage(), equalTo("EsRejectedExecutionException[test]")); assertThat(client.scrollsCleared, contains(scrollId)); // When the task is rejected we don't increment the throttled timer @@ -339,12 +331,12 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { * scroll request going down. */ public void testShardFailuresAbortRequest() throws Exception { - ShardSearchFailure shardFailure = new ShardSearchFailure(new RuntimeException("test")); - InternalSearchResponse internalResponse = new InternalSearchResponse(null, null, null, null, false, null); - new DummyAbstractAsyncBulkByScrollAction().onScrollResponse(timeValueNanos(System.nanoTime()), 0, - new SearchResponse(internalResponse, scrollId(), 5, 4, randomLong(), new ShardSearchFailure[] { shardFailure })); + SearchFailure shardFailure = new SearchFailure(new RuntimeException("test")); + ScrollableHitSource.Response scrollResponse = new ScrollableHitSource.Response(false, singletonList(shardFailure), 0, + emptyList(), null); + simulateScrollResponse(new DummyAbstractAsyncBulkByScrollAction(), timeValueNanos(System.nanoTime()), 0, scrollResponse); BulkIndexByScrollResponse response = listener.get(); - assertThat(response.getIndexingFailures(), emptyCollectionOf(Failure.class)); + assertThat(response.getBulkFailures(), empty()); assertThat(response.getSearchFailures(), contains(shardFailure)); assertFalse(response.isTimedOut()); assertNull(response.getReasonCancelled()); @@ -355,12 +347,11 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { * Mimics search timeouts.
*/ public void testSearchTimeoutsAbortRequest() throws Exception { - InternalSearchResponse internalResponse = new InternalSearchResponse(null, null, null, null, true, null); - new DummyAbstractAsyncBulkByScrollAction().onScrollResponse(timeValueNanos(System.nanoTime()), 0, - new SearchResponse(internalResponse, scrollId(), 5, 4, randomLong(), new ShardSearchFailure[0])); + ScrollableHitSource.Response scrollResponse = new ScrollableHitSource.Response(true, emptyList(), 0, emptyList(), null); + simulateScrollResponse(new DummyAbstractAsyncBulkByScrollAction(), timeValueNanos(System.nanoTime()), 0, scrollResponse); BulkIndexByScrollResponse response = listener.get(); - assertThat(response.getIndexingFailures(), emptyCollectionOf(Failure.class)); - assertThat(response.getSearchFailures(), emptyCollectionOf(ShardSearchFailure.class)); + assertThat(response.getBulkFailures(), empty()); + assertThat(response.getSearchFailures(), empty()); assertTrue(response.isTimedOut()); assertNull(response.getReasonCancelled()); assertThat(client.scrollsCleared, contains(scrollId)); @@ -375,8 +366,8 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { BulkResponse bulkResponse = new BulkResponse(new BulkItemResponse[] {new BulkItemResponse(0, "index", failure)}, randomLong()); action.onBulkResponse(timeValueNanos(System.nanoTime()), bulkResponse); BulkIndexByScrollResponse response = listener.get(); - assertThat(response.getIndexingFailures(), contains(failure)); - assertThat(response.getSearchFailures(), emptyCollectionOf(ShardSearchFailure.class)); + assertThat(response.getBulkFailures(), contains(failure)); + assertThat(response.getSearchFailures(), empty()); assertNull(response.getReasonCancelled()); } @@ -386,15 +377,13 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { public void testListenerReceiveBuildBulkExceptions() throws Exception { DummyAbstractAsyncBulkByScrollAction action = new DummyAbstractAsyncBulkByScrollAction() { @Override - protected BulkRequest buildBulk(Iterable docs) { + protected BulkRequest buildBulk(Iterable docs) { throw new RuntimeException("surprise"); } }; - InternalSearchHit hit = new InternalSearchHit(0, "id", new Text("type"), emptyMap()); - InternalSearchHits hits = new InternalSearchHits(new InternalSearchHit[] {hit}, 0, 0); - InternalSearchResponse internalResponse = new InternalSearchResponse(hits, null, null, null, false, false); - SearchResponse searchResponse = new SearchResponse(internalResponse, scrollId(), 5, 4, randomLong(), null); - action.onScrollResponse(timeValueNanos(System.nanoTime()), 0, searchResponse); + Hit hit = new ScrollableHitSource.BasicHit("index", "type", "id", 0); + ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), 1, singletonList(hit), null); + simulateScrollResponse(action, timeValueNanos(System.nanoTime()), 0, response); ExecutionException e = expectThrows(ExecutionException.class, () -> listener.get()); assertThat(e.getCause(), instanceOf(RuntimeException.class)); assertThat(e.getCause().getMessage(), equalTo("surprise")); @@ -503,9 +492,9 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { action.sendBulkRequest(timeValueNanos(System.nanoTime()), request); if (failWithRejection) { BulkIndexByScrollResponse response = listener.get(); - assertThat(response.getIndexingFailures(), hasSize(1)); - assertEquals(response.getIndexingFailures().get(0).getStatus(), RestStatus.TOO_MANY_REQUESTS); - assertThat(response.getSearchFailures(), 
emptyCollectionOf(ShardSearchFailure.class)); + assertThat(response.getBulkFailures(), hasSize(1)); + assertEquals(response.getBulkFailures().get(0).getStatus(), RestStatus.TOO_MANY_REQUESTS); + assertThat(response.getSearchFailures(), empty()); assertNull(response.getReasonCancelled()); } else { successLatch.await(10, TimeUnit.SECONDS); @@ -553,7 +542,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { if (addDestinationIndexes) { action.addDestinationIndices(singleton("foo")); } - action.startNormalTermination(emptyList(), emptyList(), false); + action.refreshAndFinish(emptyList(), emptyList(), false); if (shouldRefresh) { assertArrayEquals(new String[] {"foo"}, client.lastRefreshRequest.get().indices()); } else { @@ -567,7 +556,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { public void testCancelBeforeScrollResponse() throws Exception { // We bail so early we don't need to pass in a half way valid response. - cancelTaskCase((DummyAbstractAsyncBulkByScrollAction action) -> action.onScrollResponse(timeValueNanos(System.nanoTime()), 1, + cancelTaskCase((DummyAbstractAsyncBulkByScrollAction action) -> simulateScrollResponse(action, timeValueNanos(System.nanoTime()), 1, null)); } @@ -586,10 +575,10 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { cancelTaskCase((DummyAbstractAsyncBulkByScrollAction action) -> action.startNextScroll(timeValueNanos(System.nanoTime()), 0)); } - public void testCancelBeforeStartNormalTermination() throws Exception { + public void testCancelBeforeRefreshAndFinish() throws Exception { // Refresh or not doesn't matter - we don't try to refresh. testRequest.setRefresh(usually()); - cancelTaskCase((DummyAbstractAsyncBulkByScrollAction action) -> action.startNormalTermination(emptyList(), emptyList(), false)); + cancelTaskCase((DummyAbstractAsyncBulkByScrollAction action) -> action.refreshAndFinish(emptyList(), emptyList(), false)); assertNull("No refresh was attempted", client.lastRefreshRequest.get()); } @@ -629,12 +618,10 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { action.setScroll(scrollId()); } long total = randomIntBetween(0, Integer.MAX_VALUE); - InternalSearchHits hits = new InternalSearchHits(null, total, 0); - InternalSearchResponse searchResponse = new InternalSearchResponse(hits, null, null, null, false, false); + ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), total, emptyList(), null); // Use a long delay here so the test will time out if the cancellation doesn't reschedule the throttled task - SearchResponse scrollResponse = new SearchResponse(searchResponse, scrollId(), 5, 4, randomLong(), null); testTask.rethrottle(1); - action.onScrollResponse(timeValueNanos(System.nanoTime()), 1000, scrollResponse); + simulateScrollResponse(action, timeValueNanos(System.nanoTime()), 1000, response); // Now that we've got our cancel we'll just verify that it all came through all right assertEquals(reason, listener.get(10, TimeUnit.SECONDS).getReasonCancelled()); @@ -660,23 +647,26 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { } } + /** + * Simulate a scroll response by setting the scroll id and firing the onScrollResponse method. 
+ */ + private void simulateScrollResponse(DummyAbstractAsyncBulkByScrollAction action, TimeValue lastBatchTime, int lastBatchSize, + ScrollableHitSource.Response response) { + action.setScroll(scrollId()); + action.onScrollResponse(lastBatchTime, lastBatchSize, response); + } + private class DummyAbstractAsyncBulkByScrollAction extends AbstractAsyncBulkByScrollAction { public DummyAbstractAsyncBulkByScrollAction() { - super(testTask, logger, new ParentTaskAssigningClient(client, localNode, testTask), threadPool, testRequest, firstSearchRequest, - listener); + super(testTask, AsyncBulkByScrollActionTests.this.logger, new ParentTaskAssigningClient(client, localNode, testTask), + AsyncBulkByScrollActionTests.this.threadPool, testRequest, listener); } @Override - protected BulkRequest buildBulk(Iterable docs) { + protected BulkRequest buildBulk(Iterable docs) { return new BulkRequest(); } - - @Override - protected BulkIndexByScrollResponse buildResponse(TimeValue took, List indexingFailures, - List searchFailures, boolean timedOut) { - return new BulkIndexByScrollResponse(took, task.getStatus(), indexingFailures, searchFailures, timedOut); - } } /** @@ -821,7 +811,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { super.doExecute(action, request, listener); } - private Throwable wrappedRejectedException() { + private Exception wrappedRejectedException() { Exception e = new EsRejectedExecutionException(); int wraps = randomIntBetween(0, 4); for (int i = 0; i < wraps; i++) { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskTests.java index 05699c6f7af..735c3aa5b64 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskTests.java @@ -190,8 +190,8 @@ public class BulkByScrollTaskTests extends ESTestCase { } @Override - public void onFailure(Throwable t) { - errors.add(t); + public void onFailure(Exception e) { + errors.add(e); } }); @@ -271,7 +271,7 @@ public class BulkByScrollTaskTests extends ESTestCase { protected void doRun() throws Exception { } @Override - public void onFailure(Throwable t) { + public void onFailure(Exception e) { throw new UnsupportedOperationException(); } }); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseMatcher.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseMatcher.java index 4ef16c59141..c0c06b14d55 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseMatcher.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseMatcher.java @@ -124,7 +124,7 @@ public class BulkIndexByScrollResponseMatcher extends TypeSafeMatcher whitelist = randomWhitelist(); + String[] inList = whitelist.iterator().next().split(":"); + String host = inList[0]; + int port = Integer.valueOf(inList[1]); + checkRemoteWhitelist(whitelist, new RemoteInfo(randomAsciiOfLength(5), host, port, new BytesArray("test"), null, null), + localhostOrNone()); + } + + public void testMyselfInWhitelistRemote() throws UnknownHostException { + Set whitelist = randomWhitelist(); + whitelist.add("myself"); + TransportAddress publishAddress = new InetSocketTransportAddress(InetAddress.getByAddress(new byte[] {0x7f,0x00,0x00,0x01}), 9200); + 
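// A minimal sketch, not the module's actual implementation, of the whitelist check these tests
// exercise: entries are "host:port" strings, and the special entry "myself" matches the local
// node's HTTP publish address. isLocalPublishAddress is a hypothetical helper for illustration.
//
//     static void checkRemoteWhitelist(Set<String> whitelist, RemoteInfo remoteInfo, TransportAddress publishAddress) {
//         String check = remoteInfo.getHost() + ":" + remoteInfo.getPort();
//         if (whitelist.contains(check)
//                 || (whitelist.contains("myself") && isLocalPublishAddress(remoteInfo, publishAddress))) {
//             return;
//         }
//         throw new IllegalArgumentException("[" + check + "] not whitelisted in reindex.remote.whitelist");
//     }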
checkRemoteWhitelist(whitelist, new RemoteInfo(randomAsciiOfLength(5), "127.0.0.1", 9200, new BytesArray("test"), null, null), + publishAddress); + } + + public void testUnwhitelistedRemote() { + int port = between(1, Integer.MAX_VALUE); + Exception e = expectThrows(IllegalArgumentException.class, () -> checkRemoteWhitelist(randomWhitelist(), + new RemoteInfo(randomAsciiOfLength(5), "not in list", port, new BytesArray("test"), null, null), localhostOrNone())); + assertEquals("[not in list:" + port + "] not whitelisted in reindex.remote.whitelist", e.getMessage()); + } + + private Set randomWhitelist() { + int size = between(1, 100); + Set set = new HashSet<>(size); + while (set.size() < size) { + set.add(randomAsciiOfLength(5) + ':' + between(1, Integer.MAX_VALUE)); + } + return set; + } + + private TransportAddress localhostOrNone() { + return randomFrom(random(), null, localhost); + } +} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java index 670fcefbf55..dab0cab8d8a 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.index.mapper.internal.RoutingFieldMapper; /** * Index-by-search test for ttl, timestamp, and routing. @@ -29,7 +28,7 @@ import org.elasticsearch.index.mapper.internal.RoutingFieldMapper; public class ReindexMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMetadataTestCase { public void testRoutingCopiedByDefault() throws Exception { IndexRequest index = new IndexRequest(); - action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); + action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc().setRouting("foo")); assertEquals("foo", index.routing()); } @@ -37,7 +36,7 @@ public class ReindexMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMe TransportReindexAction.AsyncIndexBySearchAction action = action(); action.mainRequest.getDestination().routing("keep"); IndexRequest index = new IndexRequest(); - action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); + action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc().setRouting("foo")); assertEquals("foo", index.routing()); } @@ -45,7 +44,7 @@ public class ReindexMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMe TransportReindexAction.AsyncIndexBySearchAction action = action(); action.mainRequest.getDestination().routing("discard"); IndexRequest index = new IndexRequest(); - action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); + action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc().setRouting("foo")); assertEquals(null, index.routing()); } @@ -53,7 +52,7 @@ public class ReindexMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMe TransportReindexAction.AsyncIndexBySearchAction action = action(); action.mainRequest.getDestination().routing("=cat"); IndexRequest index = new IndexRequest(); - action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); + 
action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc().setRouting("foo")); assertEquals("cat", index.routing()); } @@ -61,7 +60,7 @@ public class ReindexMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMe TransportReindexAction.AsyncIndexBySearchAction action = action(); action.mainRequest.getDestination().routing("==]"); IndexRequest index = new IndexRequest(); - action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); + action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc().setRouting("foo")); assertEquals("=]", index.routing()); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java new file mode 100644 index 00000000000..efaf5e627ad --- /dev/null +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java @@ -0,0 +1,60 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.index.reindex.remote.RemoteInfo; +import org.elasticsearch.test.ESTestCase; + +import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; + +/** + * Tests some of the validation of {@linkplain ReindexRequest}. See reindex's rest tests for much more. + */ +public class ReindexRequestTests extends ESTestCase { + public void testTimestampAndTtlNotAllowed() { + ReindexRequest reindex = request(); + reindex.getDestination().ttl("1s").timestamp("now"); + ActionRequestValidationException e = reindex.validate(); + assertEquals("Validation Failed: 1: setting ttl on destination isn't supported. use scripts instead.;" + + "2: setting timestamp on destination isn't supported. 
use scripts instead.;", + e.getMessage()); + } + + public void testReindexFromRemoteDoesNotSupportSearchQuery() { + ReindexRequest reindex = request(); + reindex.setRemoteInfo(new RemoteInfo(randomAsciiOfLength(5), randomAsciiOfLength(5), between(1, Integer.MAX_VALUE), + new BytesArray("real_query"), null, null)); + reindex.getSearchRequest().source().query(matchAllQuery()); // Unsupported place to put query + ActionRequestValidationException e = reindex.validate(); + assertEquals("Validation Failed: 1: reindex from remote sources should use RemoteInfo's query instead of source's query;", + e.getMessage()); + } + + private ReindexRequest request() { + ReindexRequest reindex = new ReindexRequest(new SearchRequest(), new IndexRequest()); + reindex.getSearchRequest().indices("source"); + reindex.getDestination().index("dest"); + return reindex; + } +} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java index 74b7548cd63..c70b80b8e37 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.lucene.uid.Versions; -import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptService; import java.util.Map; @@ -106,7 +105,7 @@ public class ReindexScriptTests extends AbstractAsyncBulkIndexByScrollActionScri } public void testSetTimestamp() throws Exception { - String timestamp = randomFrom(null, "now", "1234"); + String timestamp = randomFrom("now", "1234", null); IndexRequest index = applyScript((Map ctx) -> ctx.put("_timestamp", timestamp)); assertEquals(timestamp, index.timestamp()); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSameIndexTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java similarity index 62% rename from modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSameIndexTests.java rename to modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java index f1218414af7..66896406c66 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSameIndexTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java @@ -30,15 +30,20 @@ import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.reindex.remote.RemoteInfo; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.containsString; /** - * Tests that indexing from an index back into itself fails the request. + * Tests source and target index validation of reindex. Mostly that means testing that indexing from an index back into itself fails the + * request. Note that we can't catch you trying to remotely reindex from yourself into yourself. 
We actually assert here that reindexes + * from remote don't need to come from existing indexes. It'd be silly to fail requests if the source index didn't exist on the target + * cluster.... */ -public class ReindexSameIndexTests extends ESTestCase { +public class ReindexSourceTargetValidationTests extends ESTestCase { private static final ClusterState STATE = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder() .put(index("target", "target_alias", "target_multi"), true) .put(index("target2", "target_multi"), true) @@ -50,7 +55,7 @@ public class ReindexSameIndexTests extends ESTestCase { private static final IndexNameExpressionResolver INDEX_NAME_EXPRESSION_RESOLVER = new IndexNameExpressionResolver(Settings.EMPTY); private static final AutoCreateIndex AUTO_CREATE_INDEX = new AutoCreateIndex(Settings.EMPTY, INDEX_NAME_EXPRESSION_RESOLVER); - public void testObviousCases() throws Exception { + public void testObviousCases() { fails("target", "target"); fails("target", "foo", "bar", "target", "baz"); fails("target", "foo", "bar", "target", "baz", "target"); @@ -58,7 +63,7 @@ public class ReindexSameIndexTests extends ESTestCase { succeeds("target", "source", "source2"); } - public void testAliasesContainTarget() throws Exception { + public void testAliasesContainTarget() { fails("target", "target_alias"); fails("target_alias", "target"); fails("target", "foo", "bar", "target_alias", "baz"); @@ -71,31 +76,33 @@ public class ReindexSameIndexTests extends ESTestCase { succeeds("target", "source", "source2", "source_multi"); } - public void testTargetIsAlias() throws Exception { - try { - succeeds("target_multi", "foo"); - fail("Expected failure"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("Alias [target_multi] has more than one indices associated with it [[")); - // The index names can come in either order - assertThat(e.getMessage(), containsString("target")); - assertThat(e.getMessage(), containsString("target2")); - } + public void testTargetIsAlias() { + Exception e = expectThrows(IllegalArgumentException.class, () -> succeeds("target_multi", "foo")); + assertThat(e.getMessage(), containsString("Alias [target_multi] has more than one indices associated with it [[")); + // The index names can come in either order + assertThat(e.getMessage(), containsString("target")); + assertThat(e.getMessage(), containsString("target2")); } - private void fails(String target, String... sources) throws Exception { - try { - succeeds(target, sources); - fail("Expected an exception"); - } catch (ActionRequestValidationException e) { - assertThat(e.getMessage(), - containsString("reindex cannot write into an index its reading from [target]")); - } + public void testRemoteInfoSkipsValidation() { + // The index doesn't have to exist + succeeds(new RemoteInfo(randomAsciiOfLength(5), "test", 9200, new BytesArray("test"), null, null), "does_not_exist", "target"); + // And it doesn't matter if they are the same index. They are considered to be different because the remote one is, well, remote. + succeeds(new RemoteInfo(randomAsciiOfLength(5), "test", 9200, new BytesArray("test"), null, null), "target", "target"); } - private void succeeds(String target, String... sources) throws Exception { - TransportReindexAction.validateAgainstAliases(new SearchRequest(sources), new IndexRequest(target), INDEX_NAME_EXPRESSION_RESOLVER, - AUTO_CREATE_INDEX, STATE); + private void fails(String target, String... 
sources) { + Exception e = expectThrows(ActionRequestValidationException.class, () -> succeeds(target, sources)); + assertThat(e.getMessage(), containsString("reindex cannot write into an index its reading from [target]")); + } + + private void succeeds(String target, String... sources) { + succeeds(null, target, sources); + } + + private void succeeds(RemoteInfo remoteInfo, String target, String... sources) { + TransportReindexAction.validateAgainstAliases(new SearchRequest(sources), new IndexRequest(target), remoteInfo, + INDEX_NAME_EXPRESSION_RESOLVER, AUTO_CREATE_INDEX, STATE); } private static IndexMetaData index(String name, String... aliases) { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java new file mode 100644 index 00000000000..1cbec59c49d --- /dev/null +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java @@ -0,0 +1,121 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.index.reindex.RestReindexAction.ReindexParseContext; +import org.elasticsearch.index.reindex.remote.RemoteInfo; +import org.elasticsearch.indices.query.IndicesQueriesRegistry; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +public class RestReindexActionTests extends ESTestCase { + public void testBuildRemoteInfoNoRemote() throws IOException { + assertNull(RestReindexAction.buildRemoteInfo(new HashMap<>())); + } + + public void testBuildRemoteInfoFullyLoaded() throws IOException { + Map remote = new HashMap<>(); + remote.put("host", "https://example.com:9200"); + remote.put("username", "testuser"); + remote.put("password", "testpass"); + + Map query = new HashMap<>(); + query.put("a", "b"); + + Map source = new HashMap<>(); + source.put("remote", remote); + source.put("query", query); + + RemoteInfo remoteInfo = RestReindexAction.buildRemoteInfo(source); + assertEquals("https", remoteInfo.getScheme()); + assertEquals("example.com", remoteInfo.getHost()); + assertEquals(9200, remoteInfo.getPort()); + assertEquals("{\n \"a\" : \"b\"\n}", remoteInfo.getQuery().utf8ToString()); + assertEquals("testuser", remoteInfo.getUsername()); + assertEquals("testpass", remoteInfo.getPassword()); + } + + public void testBuildRemoteInfoWithoutAllParts() throws IOException { + expectThrows(IllegalArgumentException.class, () -> buildRemoteInfoHostTestCase("example.com")); + expectThrows(IllegalArgumentException.class, () -> buildRemoteInfoHostTestCase("example.com:9200")); + expectThrows(IllegalArgumentException.class, () -> buildRemoteInfoHostTestCase("http://example.com")); + } + + public void testBuildRemoteInfoWithAllParts() throws IOException { + RemoteInfo info = buildRemoteInfoHostTestCase("http://example.com:9200"); + assertEquals("http", info.getScheme()); + assertEquals("example.com", info.getHost()); + assertEquals(9200, info.getPort()); + + info = buildRemoteInfoHostTestCase("https://other.example.com:9201"); + assertEquals("https", info.getScheme()); + assertEquals("other.example.com", info.getHost()); + assertEquals(9201, info.getPort()); + } + + public void testReindexFromRemoteRequestParsing() throws IOException { + BytesReference request; + try (XContentBuilder b = JsonXContent.contentBuilder()) { + b.startObject(); { + b.startObject("source"); { + b.startObject("remote"); { + b.field("host", "http://localhost:9200"); + } + b.endObject(); + b.field("index", "source"); + } + b.endObject(); + b.startObject("dest"); { + b.field("index", "dest"); + } + b.endObject(); + } + b.endObject(); + request = b.bytes(); + } + try (XContentParser p = JsonXContent.jsonXContent.createParser(request)) { + ReindexRequest r = new ReindexRequest(new SearchRequest(), new IndexRequest()); + RestReindexAction.PARSER.parse(p, r, + new ReindexParseContext(new IndicesQueriesRegistry(), null, null, ParseFieldMatcher.STRICT)); + assertEquals("localhost", r.getRemoteInfo().getHost()); + assertArrayEquals(new String[] {"source"}, r.getSearchRequest().indices()); + } + } + + private 
RemoteInfo buildRemoteInfoHostTestCase(String hostInRest) throws IOException { + Map remote = new HashMap<>(); + remote.put("host", hostInRest); + + Map source = new HashMap<>(); + source.put("remote", remote); + + return RestReindexAction.buildRemoteInfo(source); + } +} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java index 09945c9372b..fd5ddaedd69 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java @@ -20,13 +20,18 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.ListenableActionFuture; +import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.bulk.Retry; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.index.reindex.remote.RemoteInfo; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -39,6 +44,7 @@ import java.util.List; import java.util.concurrent.CyclicBarrier; import static org.elasticsearch.index.reindex.ReindexTestCase.matcher; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; @@ -68,6 +74,10 @@ public class RetryTests extends ESSingleNodeTestCase { // Use queues of size 1 because size 0 is broken and because search requests need the queue to function settings.put("thread_pool.bulk.queue_size", 1); settings.put("thread_pool.search.queue_size", 1); + // Enable http so we can test retries on reindex from remote. In this case the "remote" cluster is just this cluster. 
+ settings.put(NetworkModule.HTTP_ENABLED.getKey(), true); + // Whitelist reindexing from the http host we're going to use + settings.put(TransportReindexAction.REMOTE_CLUSTER_WHITELIST.getKey(), "myself"); return settings.build(); } @@ -97,6 +107,15 @@ public class RetryTests extends ESSingleNodeTestCase { matcher().created(DOC_COUNT)); } + public void testReindexFromRemote() throws Exception { + NodeInfo nodeInfo = client().admin().cluster().prepareNodesInfo().get().getNodes().get(0); + TransportAddress address = nodeInfo.getHttp().getAddress().publishAddress(); + RemoteInfo remote = new RemoteInfo("http", address.getHost(), address.getPort(), new BytesArray("{\"match_all\":{}}"), null, null); + ReindexRequestBuilder request = ReindexAction.INSTANCE.newRequestBuilder(client()).source("source").destination("dest") + .setRemoteInfo(remote); + testCase(ReindexAction.NAME, request, matcher().created(DOC_COUNT)); + } + public void testUpdateByQuery() throws Exception { testCase(UpdateByQueryAction.NAME, UpdateByQueryAction.INSTANCE.newRequestBuilder(client()).source("source"), matcher().updated(DOC_COUNT)); @@ -118,34 +137,41 @@ public class RetryTests extends ESSingleNodeTestCase { logger.info("Starting request"); ListenableActionFuture responseListener = request.execute(); - logger.info("Waiting for search rejections on the initial search"); - assertBusy(() -> assertThat(taskStatus(action).getSearchRetries(), greaterThan(0L))); + try { + logger.info("Waiting for search rejections on the initial search"); + assertBusy(() -> assertThat(taskStatus(action).getSearchRetries(), greaterThan(0L))); - logger.info("Blocking bulk and unblocking search so we start to get bulk rejections"); - CyclicBarrier bulkBlock = blockExecutor(ThreadPool.Names.BULK); - initialSearchBlock.await(); + logger.info("Blocking bulk and unblocking search so we start to get bulk rejections"); + CyclicBarrier bulkBlock = blockExecutor(ThreadPool.Names.BULK); + initialSearchBlock.await(); - logger.info("Waiting for bulk rejections"); - assertBusy(() -> assertThat(taskStatus(action).getBulkRetries(), greaterThan(0L))); + logger.info("Waiting for bulk rejections"); + assertBusy(() -> assertThat(taskStatus(action).getBulkRetries(), greaterThan(0L))); - // Keep a copy of the current number of search rejections so we can assert that we get more when we block the scroll - long initialSearchRejections = taskStatus(action).getSearchRetries(); + // Keep a copy of the current number of search rejections so we can assert that we get more when we block the scroll + long initialSearchRejections = taskStatus(action).getSearchRetries(); - logger.info("Blocking search and unblocking bulk so we should get search rejections for the scroll"); - CyclicBarrier scrollBlock = blockExecutor(ThreadPool.Names.SEARCH); - bulkBlock.await(); + logger.info("Blocking search and unblocking bulk so we should get search rejections for the scroll"); + CyclicBarrier scrollBlock = blockExecutor(ThreadPool.Names.SEARCH); + bulkBlock.await(); - logger.info("Waiting for search rejections for the scroll"); - assertBusy(() -> assertThat(taskStatus(action).getSearchRetries(), greaterThan(initialSearchRejections))); + logger.info("Waiting for search rejections for the scroll"); + assertBusy(() -> assertThat(taskStatus(action).getSearchRetries(), greaterThan(initialSearchRejections))); - logger.info("Unblocking the scroll"); - scrollBlock.await(); + logger.info("Unblocking the scroll"); + scrollBlock.await(); - logger.info("Waiting for the request to finish"); - 
BulkIndexByScrollResponse response = responseListener.get(); - assertThat(response, matcher); - assertThat(response.getBulkRetries(), greaterThan(0L)); - assertThat(response.getSearchRetries(), greaterThan(initialSearchRejections)); + logger.info("Waiting for the request to finish"); + BulkIndexByScrollResponse response = responseListener.get(); + assertThat(response, matcher); + assertThat(response.getBulkRetries(), greaterThan(0L)); + assertThat(response.getSearchRetries(), greaterThan(initialSearchRejections)); + } finally { + // Fetch the response just in case we blew up half way through. This will make sure the failure is thrown up to the top level. + BulkIndexByScrollResponse response = responseListener.get(); + assertThat(response.getSearchFailures(), empty()); + assertThat(response.getBulkFailures(), empty()); + } } /** diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java index d1cb77361bb..3e3b3a63d62 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java @@ -19,20 +19,21 @@ package org.elasticsearch.index.reindex; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.bulk.BulkItemResponse.Failure; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.index.Index; -import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure; +import org.elasticsearch.index.reindex.remote.RemoteInfo; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; @@ -56,11 +57,28 @@ public class RoundTripTests extends ESTestCase { randomRequest(reindex); reindex.getDestination().version(randomFrom(Versions.MATCH_ANY, Versions.MATCH_DELETED, 12L, 1L, 123124L, 12L)); reindex.getDestination().index("test"); + if (randomBoolean()) { + int port = between(1, Integer.MAX_VALUE); + BytesReference query = new BytesArray(randomAsciiOfLength(5)); + String username = randomBoolean() ? randomAsciiOfLength(5) : null; + String password = username != null && randomBoolean() ? 
randomAsciiOfLength(5) : null; + reindex.setRemoteInfo(new RemoteInfo(randomAsciiOfLength(5), randomAsciiOfLength(5), port, query, username, password)); + } ReindexRequest tripped = new ReindexRequest(); roundTrip(reindex, tripped); assertRequestEquals(reindex, tripped); assertEquals(reindex.getDestination().version(), tripped.getDestination().version()); assertEquals(reindex.getDestination().index(), tripped.getDestination().index()); + if (reindex.getRemoteInfo() == null) { + assertNull(tripped.getRemoteInfo()); + } else { + assertNotNull(tripped.getRemoteInfo()); + assertEquals(reindex.getRemoteInfo().getScheme(), tripped.getRemoteInfo().getScheme()); + assertEquals(reindex.getRemoteInfo().getHost(), tripped.getRemoteInfo().getHost()); + assertEquals(reindex.getRemoteInfo().getQuery(), tripped.getRemoteInfo().getQuery()); + assertEquals(reindex.getRemoteInfo().getUsername(), tripped.getRemoteInfo().getUsername()); + assertEquals(reindex.getRemoteInfo().getPassword(), tripped.getRemoteInfo().getPassword()); + } } public void testUpdateByQueryRequest() throws IOException { @@ -149,13 +167,19 @@ public class RoundTripTests extends ESTestCase { randomSimpleString(random()), new IllegalArgumentException("test"))); } - private List randomSearchFailures() { - if (usually()) { + private List randomSearchFailures() { + if (randomBoolean()) { return emptyList(); } - Index index = new Index(randomSimpleString(random()), "uuid"); - return singletonList(new ShardSearchFailure(randomSimpleString(random()), - new SearchShardTarget(randomSimpleString(random()), index, randomInt()), randomFrom(RestStatus.values()))); + String index = null; + Integer shardId = null; + String nodeId = null; + if (randomBoolean()) { + index = randomAsciiOfLength(5); + shardId = randomInt(); + nodeId = usually() ? 
randomAsciiOfLength(5) : null; + } + return singletonList(new SearchFailure(new ElasticsearchException("foo"), index, shardId, nodeId)); } private void roundTrip(Streamable example, Streamable empty) throws IOException { @@ -182,10 +206,10 @@ public class RoundTripTests extends ESTestCase { private void assertResponseEquals(BulkIndexByScrollResponse expected, BulkIndexByScrollResponse actual) { assertEquals(expected.getTook(), actual.getTook()); assertTaskStatusEquals(expected.getStatus(), actual.getStatus()); - assertEquals(expected.getIndexingFailures().size(), actual.getIndexingFailures().size()); - for (int i = 0; i < expected.getIndexingFailures().size(); i++) { - Failure expectedFailure = expected.getIndexingFailures().get(i); - Failure actualFailure = actual.getIndexingFailures().get(i); + assertEquals(expected.getBulkFailures().size(), actual.getBulkFailures().size()); + for (int i = 0; i < expected.getBulkFailures().size(); i++) { + Failure expectedFailure = expected.getBulkFailures().get(i); + Failure actualFailure = actual.getBulkFailures().get(i); assertEquals(expectedFailure.getIndex(), actualFailure.getIndex()); assertEquals(expectedFailure.getType(), actualFailure.getType()); assertEquals(expectedFailure.getId(), actualFailure.getId()); @@ -194,13 +218,15 @@ public class RoundTripTests extends ESTestCase { } assertEquals(expected.getSearchFailures().size(), actual.getSearchFailures().size()); for (int i = 0; i < expected.getSearchFailures().size(); i++) { - ShardSearchFailure expectedFailure = expected.getSearchFailures().get(i); - ShardSearchFailure actualFailure = actual.getSearchFailures().get(i); - assertEquals(expectedFailure.shard(), actualFailure.shard()); - assertEquals(expectedFailure.status(), actualFailure.status()); - // We can't use getCause because throwable doesn't implement equals - assertEquals(expectedFailure.reason(), actualFailure.reason()); + SearchFailure expectedFailure = expected.getSearchFailures().get(i); + SearchFailure actualFailure = actual.getSearchFailures().get(i); + assertEquals(expectedFailure.getIndex(), actualFailure.getIndex()); + assertEquals(expectedFailure.getShardId(), actualFailure.getShardId()); + assertEquals(expectedFailure.getNodeId(), actualFailure.getNodeId()); + assertEquals(expectedFailure.getReason().getClass(), actualFailure.getReason().getClass()); + assertEquals(expectedFailure.getReason().getMessage(), actualFailure.getReason().getMessage()); } + } private void assertTaskStatusEquals(BulkByScrollTask.Status expected, BulkByScrollTask.Status actual) { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java index bb6a33b593a..6ebb0749792 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java @@ -21,13 +21,12 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.index.mapper.internal.RoutingFieldMapper; public class UpdateByQueryMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMetadataTestCase { public void testRoutingIsCopied() throws Exception { IndexRequest index = new IndexRequest(); - action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); + 
action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc().setRouting("foo")); assertEquals("foo", index.routing()); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWhileModifyingTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWhileModifyingTests.java index faea69b870f..6bbcbd6e643 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWhileModifyingTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWhileModifyingTests.java @@ -44,7 +44,7 @@ public class UpdateByQueryWhileModifyingTests extends ReindexTestCase { AtomicReference value = new AtomicReference<>(randomSimpleString(random())); indexRandom(true, client().prepareIndex("test", "test", "test").setSource("test", value.get())); - AtomicReference failure = new AtomicReference<>(); + AtomicReference failure = new AtomicReference<>(); AtomicBoolean keepUpdating = new AtomicBoolean(true); Thread updater = new Thread(() -> { while (keepUpdating.get()) { @@ -52,8 +52,8 @@ public class UpdateByQueryWhileModifyingTests extends ReindexTestCase { BulkIndexByScrollResponse response = updateByQuery().source("test").refresh(true).abortOnVersionConflict(false).get(); assertThat(response, matcher().updated(either(equalTo(0L)).or(equalTo(1L))) .versionConflicts(either(equalTo(0L)).or(equalTo(1L)))); - } catch (Throwable t) { - failure.set(t); + } catch (Exception e) { + failure.set(e); } } }); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java index 1c57c202766..c5b9d4da64f 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.reindex; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.script.ScriptService; import java.util.Date; @@ -48,7 +49,7 @@ public class UpdateByQueryWithScriptTests @Override protected UpdateByQueryRequest request() { - return new UpdateByQueryRequest(); + return new UpdateByQueryRequest(new SearchRequest()); } @Override diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteInfoTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteInfoTests.java new file mode 100644 index 00000000000..5492a05986c --- /dev/null +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteInfoTests.java @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex.remote; + +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.test.ESTestCase; + +public class RemoteInfoTests extends ESTestCase { + public void testToString() { + RemoteInfo info = new RemoteInfo("http", "testhost", 12344, new BytesArray("testquery"), null, null); + assertEquals("host=testhost port=12344 query=testquery", info.toString()); + info = new RemoteInfo("http", "testhost", 12344, new BytesArray("testquery"), "testuser", null); + assertEquals("host=testhost port=12344 query=testquery username=testuser", info.toString()); + info = new RemoteInfo("http", "testhost", 12344, new BytesArray("testquery"), "testuser", "testpass"); + assertEquals("host=testhost port=12344 query=testquery username=testuser password=<<>>", info.toString()); + info = new RemoteInfo("https", "testhost", 12344, new BytesArray("testquery"), "testuser", "testpass"); + assertEquals("scheme=https host=testhost port=12344 query=testquery username=testuser password=<<>>", info.toString()); + } +} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java new file mode 100644 index 00000000000..9bbfd175a79 --- /dev/null +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java @@ -0,0 +1,181 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex.remote; + +import org.apache.http.HttpEntity; +import org.apache.http.entity.ContentType; +import org.elasticsearch.Version; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.util.Map; + +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.initialSearchEntity; +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.initialSearchParams; +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.initialSearchPath; +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.scrollEntity; +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.scrollParams; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.either; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.not; + +public class RemoteRequestBuildersTests extends ESTestCase { + public void testInitialSearchPath() { + SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder()); + + assertEquals("/_search", initialSearchPath(searchRequest)); + searchRequest.indices("a"); + searchRequest.types("b"); + assertEquals("/a/b/_search", initialSearchPath(searchRequest)); + searchRequest.indices("a", "b"); + searchRequest.types("c", "d"); + assertEquals("/a,b/c,d/_search", initialSearchPath(searchRequest)); + + searchRequest.indices("cat,"); + expectBadStartRequest(searchRequest, "Index", ",", "cat,"); + searchRequest.indices("cat,", "dog"); + expectBadStartRequest(searchRequest, "Index", ",", "cat,"); + searchRequest.indices("dog", "cat,"); + expectBadStartRequest(searchRequest, "Index", ",", "cat,"); + searchRequest.indices("cat/"); + expectBadStartRequest(searchRequest, "Index", "/", "cat/"); + searchRequest.indices("cat/", "dog"); + expectBadStartRequest(searchRequest, "Index", "/", "cat/"); + searchRequest.indices("dog", "cat/"); + expectBadStartRequest(searchRequest, "Index", "/", "cat/"); + + searchRequest.indices("ok"); + searchRequest.types("cat,"); + expectBadStartRequest(searchRequest, "Type", ",", "cat,"); + searchRequest.types("cat,", "dog"); + expectBadStartRequest(searchRequest, "Type", ",", "cat,"); + searchRequest.types("dog", "cat,"); + expectBadStartRequest(searchRequest, "Type", ",", "cat,"); + searchRequest.types("cat/"); + expectBadStartRequest(searchRequest, "Type", "/", "cat/"); + searchRequest.types("cat/", "dog"); + expectBadStartRequest(searchRequest, "Type", "/", "cat/"); + searchRequest.types("dog", "cat/"); + expectBadStartRequest(searchRequest, "Type", "/", "cat/"); + } + + private void expectBadStartRequest(SearchRequest searchRequest, String type, String bad, String failed) { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> initialSearchPath(searchRequest)); + assertEquals(type + " containing [" + bad + "] not supported but got [" + failed + "]", e.getMessage()); + } + + public void testInitialSearchParamsSort() { + SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder()); + + // Test sort:_doc for versions
+
+    public void testInitialSearchParamsSort() {
+        SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder());
+
+        // Test sort:_doc for versions that support it.
+        Version remoteVersion = Version.fromId(between(Version.V_2_1_0_ID, Version.CURRENT.id));
+        searchRequest.source().sort("_doc");
+        assertThat(initialSearchParams(searchRequest, remoteVersion), hasEntry("sorts", "_doc:asc"));
+
+        // Test search_type scan for versions that don't support sort:_doc.
+        remoteVersion = Version.fromId(between(0, Version.V_2_1_0_ID - 1));
+        assertThat(initialSearchParams(searchRequest, remoteVersion), hasEntry("search_type", "scan"));
+
+        // Test sorting by some field. Version doesn't matter.
+        remoteVersion = Version.fromId(between(0, Version.CURRENT.id));
+        searchRequest.source().sorts().clear();
+        searchRequest.source().sort("foo");
+        assertThat(initialSearchParams(searchRequest, remoteVersion), hasEntry("sorts", "foo:asc"));
+    }
+
+    public void testInitialSearchParamsFields() {
+        SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder());
+
+        // Test request without any fields
+        Version remoteVersion = Version.fromId(between(0, Version.CURRENT.id));
+        assertThat(initialSearchParams(searchRequest, remoteVersion),
+                not(either(hasKey("stored_fields")).or(hasKey("fields"))));
+
+        // Setup some fields for the next two tests
+        searchRequest.source().storedField("_source").storedField("_id");
+
+        // Test stored_fields for versions that support it
+        remoteVersion = Version.fromId(between(Version.V_5_0_0_alpha4_ID, Version.CURRENT.id));
+        assertThat(initialSearchParams(searchRequest, remoteVersion), hasEntry("stored_fields", "_source,_id"));
+
+        // Test fields for versions that support it
+        remoteVersion = Version.fromId(between(0, Version.V_5_0_0_alpha4_ID - 1));
+        assertThat(initialSearchParams(searchRequest, remoteVersion), hasEntry("fields", "_source,_id"));
+    }
+
+    public void testInitialSearchParamsMisc() {
+        SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder());
+        Version remoteVersion = Version.fromId(between(0, Version.CURRENT.id));
+
+        TimeValue scroll = null;
+        if (randomBoolean()) {
+            scroll = TimeValue.parseTimeValue(randomPositiveTimeValue(), "test");
+            searchRequest.scroll(scroll);
+        }
+        int size = between(0, Integer.MAX_VALUE);
+        searchRequest.source().size(size);
+        Boolean fetchVersion = null;
+        if (randomBoolean()) {
+            fetchVersion = randomBoolean();
+            searchRequest.source().version(fetchVersion);
+        }
+
+        Map<String, String> params = initialSearchParams(searchRequest, remoteVersion);
+
+        assertThat(params, scroll == null ? not(hasKey("scroll")) : hasEntry("scroll", scroll.toString()));
+        assertThat(params, hasEntry("size", Integer.toString(size)));
+        assertThat(params, fetchVersion == null || fetchVersion == true ?
hasEntry("version", null) : not(hasEntry("version", null))); + } + + public void testInitialSearchEntity() throws IOException { + String query = "{\"match_all\":{}}"; + HttpEntity entity = initialSearchEntity(new BytesArray(query)); + assertEquals(ContentType.APPLICATION_JSON.toString(), entity.getContentType().getValue()); + assertEquals("{\"query\":" + query + "}", + Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8))); + + // Invalid XContent fails + RuntimeException e = expectThrows(RuntimeException.class, () -> initialSearchEntity(new BytesArray("{}, \"trailing\": {}"))); + assertThat(e.getCause().getMessage(), containsString("Unexpected character (',' (code 44))")); + e = expectThrows(RuntimeException.class, () -> initialSearchEntity(new BytesArray("{"))); + assertThat(e.getCause().getMessage(), containsString("Unexpected end-of-input")); + } + + public void testScrollParams() { + TimeValue scroll = TimeValue.parseTimeValue(randomPositiveTimeValue(), "test"); + assertThat(scrollParams(scroll), hasEntry("scroll", scroll.toString())); + } + + public void testScrollEntity() throws IOException { + String scroll = randomAsciiOfLength(30); + HttpEntity entity = scrollEntity(scroll); + assertEquals(ContentType.TEXT_PLAIN.toString(), entity.getContentType().getValue()); + assertEquals(scroll, Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8))); + } +} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java new file mode 100644 index 00000000000..f8f3e82b4bb --- /dev/null +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java @@ -0,0 +1,381 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.index.reindex.remote;
+
+import org.apache.http.HttpEntity;
+import org.elasticsearch.Version;
+import org.elasticsearch.action.bulk.BackoffPolicy;
+import org.elasticsearch.action.search.SearchRequest;
+import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.io.Streams;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
+import org.elasticsearch.index.reindex.ScrollableHitSource.Response;
+import org.elasticsearch.index.reindex.remote.RemoteScrollableHitSource.ResponseListener;
+import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.threadpool.TestThreadPool;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.junit.After;
+import org.junit.Before;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
+import java.util.Map;
+import java.util.concurrent.Executor;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.function.Consumer;
+
+import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
+import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.instanceOf;
+
+public class RemoteScrollableHitSourceTests extends ESTestCase {
+    private final String FAKE_SCROLL_ID = "DnF1ZXJ5VGhlbkZldGNoBQAAAfakescroll";
+    private int retries;
+    private ThreadPool threadPool;
+    private SearchRequest searchRequest;
+    private int retriesAllowed;
+
+    @Before
+    @Override
+    public void setUp() throws Exception {
+        super.setUp();
+        threadPool = new TestThreadPool(getTestName()) {
+            @Override
+            public Executor executor(String name) {
+                return r -> r.run();
+            }
+
+            @Override
+            public ScheduledFuture<?> schedule(TimeValue delay, String name, Runnable command) {
+                command.run();
+                return null;
+            }
+        };
+        retries = 0;
+        searchRequest = new SearchRequest();
+        searchRequest.scroll(timeValueMinutes(5));
+        searchRequest.source(new SearchSourceBuilder().size(10).version(true).sort("_doc").size(123));
+        retriesAllowed = 0;
+    }
+
+    @After
+    @Override
+    public void tearDown() throws Exception {
+        super.tearDown();
+        terminate(threadPool);
+    }
+
+    public void testLookupRemoteVersion() throws Exception {
+        sourceWithMockedRemoteCall(false, "main/0_20_5.json").lookupRemoteVersion(v -> assertEquals(Version.fromString("0.20.5"), v));
+        sourceWithMockedRemoteCall(false, "main/0_90_13.json").lookupRemoteVersion(v -> assertEquals(Version.fromString("0.90.13"), v));
+        sourceWithMockedRemoteCall(false, "main/1_7_5.json").lookupRemoteVersion(v -> assertEquals(Version.fromString("1.7.5"), v));
+        sourceWithMockedRemoteCall(false, "main/2_3_3.json").lookupRemoteVersion(v -> assertEquals(Version.V_2_3_3, v));
+        sourceWithMockedRemoteCall(false, "main/5_0_0_alpha_3.json").lookupRemoteVersion(v -> assertEquals(Version.V_5_0_0_alpha3, v));
+    }
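
The thread pool overridden in setUp above is what keeps these tests deterministic: callbacks run inline on the calling thread and scheduled retries fire immediately. The same trick in isolation, as a minimal self-contained sketch independent of Elasticsearch:

    import java.util.concurrent.Executor;

    public class InlineExecutorDemo {
        public static void main(String[] args) {
            // An executor that runs each task on the submitting thread, so code
            // written against an "async" executor completes before the test returns.
            Executor inline = Runnable::run;
            inline.execute(() -> System.out.println("ran on " + Thread.currentThread().getName()));
        }
    }
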
+
+    public void testParseStartOk() throws Exception {
+        AtomicBoolean called = new AtomicBoolean();
+        sourceWithMockedRemoteCall("start_ok.json").doStart(r -> {
+            assertFalse(r.isTimedOut());
+            assertEquals(FAKE_SCROLL_ID, r.getScrollId());
+            assertEquals(4, r.getTotalHits());
+            assertThat(r.getFailures(), empty());
+            assertThat(r.getHits(), hasSize(1));
+            assertEquals("test", r.getHits().get(0).getIndex());
+            assertEquals("test", r.getHits().get(0).getType());
+            assertEquals("AVToMiC250DjIiBO3yJ_", r.getHits().get(0).getId());
+            assertEquals("{\"test\":\"test2\"}", r.getHits().get(0).getSource().utf8ToString());
+            assertNull(r.getHits().get(0).getTTL());
+            assertNull(r.getHits().get(0).getTimestamp());
+            assertNull(r.getHits().get(0).getRouting());
+            called.set(true);
+        });
+        assertTrue(called.get());
+    }
+
+    public void testParseScrollOk() throws Exception {
+        AtomicBoolean called = new AtomicBoolean();
+        sourceWithMockedRemoteCall("scroll_ok.json").doStartNextScroll("", timeValueMillis(0), r -> {
+            assertFalse(r.isTimedOut());
+            assertEquals(FAKE_SCROLL_ID, r.getScrollId());
+            assertEquals(4, r.getTotalHits());
+            assertThat(r.getFailures(), empty());
+            assertThat(r.getHits(), hasSize(1));
+            assertEquals("test", r.getHits().get(0).getIndex());
+            assertEquals("test", r.getHits().get(0).getType());
+            assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId());
+            assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString());
+            assertNull(r.getHits().get(0).getTTL());
+            assertNull(r.getHits().get(0).getTimestamp());
+            assertNull(r.getHits().get(0).getRouting());
+            called.set(true);
+        });
+        assertTrue(called.get());
+    }
+
+    /**
+     * Test for parsing _ttl, _timestamp, and _routing.
+     */
+    public void testParseScrollFullyLoaded() throws Exception {
+        AtomicBoolean called = new AtomicBoolean();
+        sourceWithMockedRemoteCall("scroll_fully_loaded.json").doStartNextScroll("", timeValueMillis(0), r -> {
+            assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId());
+            assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString());
+            assertEquals((Long) 1234L, r.getHits().get(0).getTTL());
+            assertEquals((Long) 123444L, r.getHits().get(0).getTimestamp());
+            assertEquals("testrouting", r.getHits().get(0).getRouting());
+            assertEquals("testparent", r.getHits().get(0).getParent());
+            called.set(true);
+        });
+        assertTrue(called.get());
+    }
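
The next test covers the pre-2.1.0 scan path described in its javadoc. A sketch of how such a jump-start might be wired (illustration only; Page stands in for ScrollableHitSource.Response, and startNextScroll is passed in rather than being the real method):

    import java.util.Collections;
    import java.util.List;
    import java.util.function.Consumer;

    public class ScanJumpStartSketch {
        static class Page {
            final String scrollId; final long totalHits; final List<String> hits;
            Page(String scrollId, long totalHits, List<String> hits) {
                this.scrollId = scrollId; this.totalHits = totalHits; this.hits = hits;
            }
        }

        // If the first "scan" round trip reports hits but carries no documents,
        // fetch the first real page immediately instead of surfacing an empty one.
        static void onStartResponse(Consumer<Page> onResponse, Page page, Consumer<String> startNextScroll) {
            if (page.hits.isEmpty() && page.totalHits > 0) {
                startNextScroll.accept(page.scrollId);
            } else {
                onResponse.accept(page);
            }
        }

        public static void main(String[] args) {
            onStartResponse(
                p -> System.out.println("deliver page with " + p.hits.size() + " hits"),
                new Page("scan-scroll-id", 10000, Collections.<String>emptyList()),
                scrollId -> System.out.println("jump-start: scrolling " + scrollId));
        }
    }
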
+
+    /**
+     * Versions of Elasticsearch before 2.1.0 don't support sort:_doc and instead need to use search_type=scan. Scan doesn't return
+     * documents in the first response, which reindex doesn't expect, so we jump straight to the next iteration.
+     */
+    public void testScanJumpStart() throws Exception {
+        AtomicBoolean called = new AtomicBoolean();
+        sourceWithMockedRemoteCall("start_scan.json", "scroll_ok.json").doStart(r -> {
+            assertFalse(r.isTimedOut());
+            assertEquals(FAKE_SCROLL_ID, r.getScrollId());
+            assertEquals(4, r.getTotalHits());
+            assertThat(r.getFailures(), empty());
+            assertThat(r.getHits(), hasSize(1));
+            assertEquals("test", r.getHits().get(0).getIndex());
+            assertEquals("test", r.getHits().get(0).getType());
+            assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId());
+            assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString());
+            assertNull(r.getHits().get(0).getTTL());
+            assertNull(r.getHits().get(0).getTimestamp());
+            assertNull(r.getHits().get(0).getRouting());
+            called.set(true);
+        });
+        assertTrue(called.get());
+    }
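
The two failure-parsing tests that follow rely on the response parser rebuilding a real exception from the remote failure's type/reason pair. A plausible shape for that mapping, sketched under the assumption that EsRejectedExecutionException has a plain message constructor (this is not the actual parser):

    // Sketch only: recognize a remote rejection by its "type" so retry logic can
    // treat it as retryable; wrap anything unrecognized the way the tests expect.
    static Throwable reasonFrom(String type, String reason) {
        if ("es_rejected_execution_exception".equals(type)) {
            return new org.elasticsearch.common.util.concurrent.EsRejectedExecutionException(reason);
        }
        return new RuntimeException("Unknown remote exception with reason=[" + reason + "]");
    }
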
+ + "EsThreadPoolExecutor@778ea553[Running, pool size = 7, active threads = 7, queued tasks = 1000, " + + "completed tasks = 4182]]", r.getFailures().get(0).getReason().getMessage()); + assertThat(r.getHits(), hasSize(1)); + assertEquals("test", r.getHits().get(0).getIndex()); + assertEquals("test", r.getHits().get(0).getType()); + assertEquals("AVToMiC250DjIiBO3yJ_", r.getHits().get(0).getId()); + assertEquals("{\"test\":\"test1\"}", r.getHits().get(0).getSource().utf8ToString()); + called.set(true); + }; + sourceWithMockedRemoteCall("rejection.json").doStart(checkResponse); + assertTrue(called.get()); + called.set(false); + sourceWithMockedRemoteCall("rejection.json").doStartNextScroll("scroll", timeValueMillis(0), checkResponse); + assertTrue(called.get()); + } + + public void testParseFailureWithStatus() throws Exception { + // The rejection comes through in the handler because the mocked http response isn't marked as an error + AtomicBoolean called = new AtomicBoolean(); + // Handling a scroll rejection is the same as handling a search rejection so we reuse the verification code + Consumer checkResponse = r -> { + assertFalse(r.isTimedOut()); + assertEquals(FAKE_SCROLL_ID, r.getScrollId()); + assertEquals(10000, r.getTotalHits()); + assertThat(r.getFailures(), hasSize(1)); + assertEquals(null, r.getFailures().get(0).getIndex()); + assertEquals(null, r.getFailures().get(0).getShardId()); + assertEquals(null, r.getFailures().get(0).getNodeId()); + assertThat(r.getFailures().get(0).getReason(), instanceOf(RuntimeException.class)); + assertEquals("Unknown remote exception with reason=[SearchContextMissingException[No search context found for id [82]]]", + r.getFailures().get(0).getReason().getMessage()); + assertThat(r.getHits(), hasSize(1)); + assertEquals("test", r.getHits().get(0).getIndex()); + assertEquals("test", r.getHits().get(0).getType()); + assertEquals("10000", r.getHits().get(0).getId()); + assertEquals("{\"test\":\"test10000\"}", r.getHits().get(0).getSource().utf8ToString()); + called.set(true); + }; + sourceWithMockedRemoteCall("failure_with_status.json").doStart(checkResponse); + assertTrue(called.get()); + called.set(false); + sourceWithMockedRemoteCall("failure_with_status.json").doStartNextScroll("scroll", timeValueMillis(0), checkResponse); + assertTrue(called.get()); + } + + public void testParseRequestFailure() throws Exception { + AtomicBoolean called = new AtomicBoolean(); + Consumer checkResponse = r -> { + assertFalse(r.isTimedOut()); + assertNull(r.getScrollId()); + assertEquals(0, r.getTotalHits()); + assertThat(r.getFailures(), hasSize(1)); + assertThat(r.getFailures().get(0).getReason(), instanceOf(ParsingException.class)); + ParsingException failure = (ParsingException) r.getFailures().get(0).getReason(); + assertEquals("Unknown key for a VALUE_STRING in [invalid].", failure.getMessage()); + assertEquals(2, failure.getLineNumber()); + assertEquals(14, failure.getColumnNumber()); + called.set(true); + }; + sourceWithMockedRemoteCall("request_failure.json").doStart(checkResponse); + assertTrue(called.get()); + called.set(false); + sourceWithMockedRemoteCall("request_failure.json").doStartNextScroll("scroll", timeValueMillis(0), checkResponse); + assertTrue(called.get()); + } + + public void testRetryAndSucceed() throws Exception { + AtomicBoolean called = new AtomicBoolean(); + Consumer checkResponse = r -> { + assertThat(r.getFailures(), hasSize(0)); + called.set(true); + }; + retriesAllowed = between(1, Integer.MAX_VALUE); + 
sourceWithMockedRemoteCall("fail:rejection.json", "start_ok.json").doStart(checkResponse); + assertTrue(called.get()); + assertEquals(1, retries); + retries = 0; + called.set(false); + sourceWithMockedRemoteCall("fail:rejection.json", "scroll_ok.json").doStartNextScroll("scroll", timeValueMillis(0), + checkResponse); + assertTrue(called.get()); + assertEquals(1, retries); + } + + public void testRetryUntilYouRunOutOfTries() throws Exception { + AtomicBoolean called = new AtomicBoolean(); + Consumer checkResponse = r -> called.set(true); + retriesAllowed = between(0, 10); + String[] paths = new String[retriesAllowed + 2]; + for (int i = 0; i < retriesAllowed + 2; i++) { + paths[i] = "fail:rejection.json"; + } + RuntimeException e = expectThrows(RuntimeException.class, () -> sourceWithMockedRemoteCall(paths).doStart(checkResponse)); + assertEquals("failed", e.getMessage()); + assertFalse(called.get()); + assertEquals(retriesAllowed, retries); + retries = 0; + e = expectThrows(RuntimeException.class, + () -> sourceWithMockedRemoteCall(paths).doStartNextScroll("scroll", timeValueMillis(0), checkResponse)); + assertEquals("failed", e.getMessage()); + assertFalse(called.get()); + assertEquals(retriesAllowed, retries); + } + + private RemoteScrollableHitSource sourceWithMockedRemoteCall(String... paths) throws Exception { + return sourceWithMockedRemoteCall(true, paths); + } + + /** + * Creates a hit source that doesn't make the remote request and instead returns data from some files. Also requests are always returned + * synchronously rather than asynchronously. + */ + private RemoteScrollableHitSource sourceWithMockedRemoteCall(boolean mockRemoteVersion, String... paths) throws Exception { + URL[] resources = new URL[paths.length]; + for (int i = 0; i < paths.length; i++) { + resources[i] = Thread.currentThread().getContextClassLoader().getResource("responses/" + paths[i].replace("fail:", "")); + if (resources[i] == null) { + throw new IllegalArgumentException("Couldn't find [" + paths[i] + "]"); + } + } + RemoteScrollableHitSource.AsyncClient client = new RemoteScrollableHitSource.AsyncClient() { + int responseCount = 0; + @Override + public void performRequest(String method, String uri, Map params, HttpEntity entity, + ResponseListener listener) { + try { + URL resource = resources[responseCount]; + String path = paths[responseCount++]; + InputStream stream = resource.openStream(); + if (path.startsWith("fail:")) { + String body = Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)); + if (path.equals("fail:rejection.json")) { + listener.onRetryableFailure(new RuntimeException(body)); + } else { + listener.onFailure(new RuntimeException(body)); + } + } else { + listener.onResponse(stream); + } + } catch (IOException e) { + listener.onFailure(e); + } + } + + @Override + public void close() throws IOException { + } + }; + TestRemoteScrollableHitSource hitSource = new TestRemoteScrollableHitSource(client) { + @Override + void lookupRemoteVersion(Consumer onVersion) { + if (mockRemoteVersion) { + onVersion.accept(Version.CURRENT); + } else { + super.lookupRemoteVersion(onVersion); + } + } + }; + if (mockRemoteVersion) { + hitSource.remoteVersion = Version.CURRENT; + } + return hitSource; + } + + private BackoffPolicy backoff() { + return BackoffPolicy.constantBackoff(timeValueMillis(0), retriesAllowed); + } + + private void countRetry() { + retries += 1; + } + + private void failRequest(Throwable t) { + throw new RuntimeException("failed", t); + } + + private 
class TestRemoteScrollableHitSource extends RemoteScrollableHitSource { + public TestRemoteScrollableHitSource(RemoteScrollableHitSource.AsyncClient client) { + super(RemoteScrollableHitSourceTests.this.logger, backoff(), RemoteScrollableHitSourceTests.this.threadPool, + RemoteScrollableHitSourceTests.this::countRetry, RemoteScrollableHitSourceTests.this::failRequest, client, + new BytesArray("{}"), RemoteScrollableHitSourceTests.this.searchRequest); + } + } +} diff --git a/modules/reindex/src/test/resources/responses/failure_with_status.json b/modules/reindex/src/test/resources/responses/failure_with_status.json new file mode 100644 index 00000000000..314de37a679 --- /dev/null +++ b/modules/reindex/src/test/resources/responses/failure_with_status.json @@ -0,0 +1,28 @@ +{ + "_scroll_id": "DnF1ZXJ5VGhlbkZldGNoBQAAAfakescroll", + "took": 3, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 4, + "failed": 1, + "failures": [ { + "status": 404, + "reason": "SearchContextMissingException[No search context found for id [82]]" + } ] + }, + "hits": { + "total": 10000, + "max_score": 0.0, + "hits": [ { + "_index": "test", + "_type": "test", + "_id": "10000", + "_version": 1, + "_score": 0.0, + "_source": { + "test": "test10000" + } + } ] + } +} diff --git a/modules/reindex/src/test/resources/responses/main/0_20_5.json b/modules/reindex/src/test/resources/responses/main/0_20_5.json new file mode 100644 index 00000000000..58ca8e9428f --- /dev/null +++ b/modules/reindex/src/test/resources/responses/main/0_20_5.json @@ -0,0 +1,10 @@ +{ + "ok" : true, + "status" : 200, + "name" : "Techno", + "version" : { + "number" : "0.20.5", + "snapshot_build" : false + }, + "tagline" : "You Know, for Search" +} diff --git a/modules/reindex/src/test/resources/responses/main/0_90_13.json b/modules/reindex/src/test/resources/responses/main/0_90_13.json new file mode 100644 index 00000000000..1b104e068d9 --- /dev/null +++ b/modules/reindex/src/test/resources/responses/main/0_90_13.json @@ -0,0 +1,13 @@ +{ + "ok" : true, + "status" : 200, + "name" : "Mogul of the Mystic Mountain", + "version" : { + "number" : "0.90.13", + "build_hash" : "249c9c5e06765c9e929e92b1d235e1ba4dc679fa", + "build_timestamp" : "2014-03-25T15:27:12Z", + "build_snapshot" : false, + "lucene_version" : "4.6" + }, + "tagline" : "You Know, for Search" +} diff --git a/modules/reindex/src/test/resources/responses/main/1_7_5.json b/modules/reindex/src/test/resources/responses/main/1_7_5.json new file mode 100644 index 00000000000..0fe721defee --- /dev/null +++ b/modules/reindex/src/test/resources/responses/main/1_7_5.json @@ -0,0 +1,13 @@ +{ + "status" : 200, + "name" : "Robert Kelly", + "cluster_name" : "elasticsearch", + "version" : { + "number" : "1.7.5", + "build_hash" : "00f95f4ffca6de89d68b7ccaf80d148f1f70e4d4", + "build_timestamp" : "2016-02-02T09:55:30Z", + "build_snapshot" : false, + "lucene_version" : "4.10.4" + }, + "tagline" : "You Know, for Search" +} diff --git a/modules/reindex/src/test/resources/responses/main/2_3_3.json b/modules/reindex/src/test/resources/responses/main/2_3_3.json new file mode 100644 index 00000000000..8cd90b3b637 --- /dev/null +++ b/modules/reindex/src/test/resources/responses/main/2_3_3.json @@ -0,0 +1,12 @@ +{ + "name" : "Ezekiel Stane", + "cluster_name" : "elasticsearch", + "version" : { + "number" : "2.3.3", + "build_hash" : "218bdf10790eef486ff2c41a3df5cfa32dadcfde", + "build_timestamp" : "2016-05-17T15:40:04Z", + "build_snapshot" : false, + "lucene_version" : "5.5.0" + }, + "tagline" : "You Know, 
for Search" +} diff --git a/modules/reindex/src/test/resources/responses/main/5_0_0_alpha_3.json b/modules/reindex/src/test/resources/responses/main/5_0_0_alpha_3.json new file mode 100644 index 00000000000..6911f61c3e9 --- /dev/null +++ b/modules/reindex/src/test/resources/responses/main/5_0_0_alpha_3.json @@ -0,0 +1,12 @@ +{ + "name" : "Paibo", + "cluster_name" : "distribution_run", + "version" : { + "number" : "5.0.0-alpha3", + "build_hash" : "42e092f", + "build_date" : "2016-05-26T16:55:45.405Z", + "build_snapshot" : true, + "lucene_version" : "6.0.0" + }, + "tagline" : "You Know, for Search" +} diff --git a/modules/reindex/src/test/resources/responses/rejection.json b/modules/reindex/src/test/resources/responses/rejection.json new file mode 100644 index 00000000000..36120fbf888 --- /dev/null +++ b/modules/reindex/src/test/resources/responses/rejection.json @@ -0,0 +1,34 @@ +{ + "_scroll_id" : "DnF1ZXJ5VGhlbkZldGNoBQAAAfakescroll", + "took" : 6, + "timed_out" : false, + "_shards" : { + "total" : 5, + "successful" : 4, + "failed" : 1, + "failures" : [ { + "shard" : 0, + "index" : "test", + "node" : "87A7NvevQxSrEwMbtRCecg", + "reason" : { + "type" : "es_rejected_execution_exception", + "reason" : "rejected execution of org.elasticsearch.transport.TransportService$5@52d06af2 on EsThreadPoolExecutor[search, queue capacity = 1000, org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor@778ea553[Running, pool size = 7, active threads = 7, queued tasks = 1000, completed tasks = 4182]]" + } + } ] + }, + "hits" : { + "total" : 4, + "max_score" : null, + "hits" : [ { + "_index" : "test", + "_type" : "test", + "_id" : "AVToMiC250DjIiBO3yJ_", + "_version" : 1, + "_score" : null, + "_source" : { + "test" : "test1" + }, + "sort" : [ 0 ] + } ] + } +} diff --git a/modules/reindex/src/test/resources/responses/request_failure.json b/modules/reindex/src/test/resources/responses/request_failure.json new file mode 100644 index 00000000000..6f6de78c060 --- /dev/null +++ b/modules/reindex/src/test/resources/responses/request_failure.json @@ -0,0 +1,15 @@ +{ + "error" : { + "root_cause" : [ { + "type" : "parsing_exception", + "reason" : "Unknown key for a VALUE_STRING in [invalid].", + "line" : 2, + "col" : 14 + } ], + "type" : "parsing_exception", + "reason" : "Unknown key for a VALUE_STRING in [invalid].", + "line" : 2, + "col" : 14 + }, + "status" : 400 +} diff --git a/modules/reindex/src/test/resources/responses/scroll_fully_loaded.json b/modules/reindex/src/test/resources/responses/scroll_fully_loaded.json new file mode 100644 index 00000000000..a2c1be34e5c --- /dev/null +++ b/modules/reindex/src/test/resources/responses/scroll_fully_loaded.json @@ -0,0 +1,30 @@ +{ + "_scroll_id" : "DnF1ZXJ5VGhlbkZldGNoBQAAAfakescroll", + "took" : 3, + "timed_out" : false, + "terminated_early" : true, + "_shards" : { + "total" : 5, + "successful" : 5, + "failed" : 0 + }, + "hits" : { + "total" : 4, + "max_score" : null, + "hits" : [ { + "_index" : "test", + "_type" : "test", + "_id" : "AVToMiDL50DjIiBO3yKA", + "_version" : 1, + "_score" : null, + "_source" : { + "test" : "test3" + }, + "sort" : [ 0 ], + "_routing": "testrouting", + "_parent": "testparent", + "_ttl" : 1234, + "_timestamp": 123444 + } ] + } +} diff --git a/modules/reindex/src/test/resources/responses/scroll_ok.json b/modules/reindex/src/test/resources/responses/scroll_ok.json new file mode 100644 index 00000000000..5cdc4a400cb --- /dev/null +++ b/modules/reindex/src/test/resources/responses/scroll_ok.json @@ -0,0 +1,26 @@ +{ + "_scroll_id" : 
"DnF1ZXJ5VGhlbkZldGNoBQAAAfakescroll", + "took" : 3, + "timed_out" : false, + "terminated_early" : true, + "_shards" : { + "total" : 5, + "successful" : 5, + "failed" : 0 + }, + "hits" : { + "total" : 4, + "max_score" : null, + "hits" : [ { + "_index" : "test", + "_type" : "test", + "_id" : "AVToMiDL50DjIiBO3yKA", + "_version" : 1, + "_score" : null, + "_source" : { + "test" : "test3" + }, + "sort" : [ 0 ] + } ] + } +} diff --git a/modules/reindex/src/test/resources/responses/start_ok.json b/modules/reindex/src/test/resources/responses/start_ok.json new file mode 100644 index 00000000000..a2988341f8c --- /dev/null +++ b/modules/reindex/src/test/resources/responses/start_ok.json @@ -0,0 +1,25 @@ +{ + "_scroll_id" : "DnF1ZXJ5VGhlbkZldGNoBQAAAfakescroll", + "took" : 6, + "timed_out" : false, + "_shards" : { + "total" : 5, + "successful" : 5, + "failed" : 0 + }, + "hits" : { + "total" : 4, + "max_score" : null, + "hits" : [ { + "_index" : "test", + "_type" : "test", + "_id" : "AVToMiC250DjIiBO3yJ_", + "_version" : 1, + "_score" : null, + "_source" : { + "test" : "test2" + }, + "sort" : [ 0 ] + } ] + } +} diff --git a/modules/reindex/src/test/resources/responses/start_scan.json b/modules/reindex/src/test/resources/responses/start_scan.json new file mode 100644 index 00000000000..5576d708b30 --- /dev/null +++ b/modules/reindex/src/test/resources/responses/start_scan.json @@ -0,0 +1,15 @@ +{ + "_scroll_id" : "c2Nhbjs1OzQ0Ojd5aUZoUm5hU2lDZ3ZvUHMzMXdGQ1E7NDU6N3lpRmhSbmFTaUNndm9QczMxd0ZDUTs0Mjo3eWlGaFJuYVNpQ2d2b1BzMzF3RkNROzQzOjd5aUZoUm5hU2lDZ3ZvUHMzMXdGQ1E7NDE6N3lpRmhSbmFTaUNndm9QczMxd0ZDUTsxO3RvdGFsX2hpdHM6MTAwMDA7", + "took" : 13, + "timed_out" : false, + "_shards" : { + "total" : 5, + "successful" : 5, + "failed" : 0 + }, + "hits" : { + "total" : 10000, + "max_score" : 0.0, + "hits" : [ ] + } +} diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml index 85cd6143d69..041aa127cd4 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml @@ -69,7 +69,7 @@ - is_false: deleted - do: - task.get: + tasks.get: wait_for_completion: true task_id: $task - is_false: node_failures diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/40_versioning.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/40_versioning.yaml new file mode 100644 index 00000000000..c81305e2824 --- /dev/null +++ b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/40_versioning.yaml @@ -0,0 +1,29 @@ +--- +"delete_by_query fails to delete documents with version number equal to zero": + - do: + index: + index: index1 + type: type1 + id: 1 + version: 0 # Starting version is zero + version_type: external + body: {"delete": 0} + - do: + indices.refresh: {} + + # Delete by query uses internal versioning and will fail here + # because zero is not allowed as a valid version number + - do: + catch: /illegal version value \[0\] for version type \[INTERNAL\]./ + delete_by_query: + index: index1 + refresh: true + body: + query: + match_all: {} + - do: + get: + index: index1 + type: type1 + id: 1 + - match: {_version: 0} diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/70_throttle.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/70_throttle.yaml index 
65a22781550..96cfaa42b5a 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/70_throttle.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/70_throttle.yaml @@ -134,7 +134,7 @@ task_id: $task - do: - task.get: + tasks.get: wait_for_completion: true task_id: $task @@ -197,6 +197,6 @@ task_id: $task - do: - task.get: + tasks.get: wait_for_completion: true task_id: $task diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml index addbebd44a7..a567ca67bfa 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/10_basic.yaml @@ -93,7 +93,7 @@ - is_false: deleted - do: - task.get: + tasks.get: wait_for_completion: true task_id: $task - is_false: node_failures diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml index b3e35d2f49e..e31c8f84cec 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml @@ -224,3 +224,46 @@ stored_fields: [_id] dest: index: dest + +--- +"unwhitelisted remote host fails": + - do: + catch: /\[badremote:9200\] not whitelisted in reindex.remote.whitelist/ + reindex: + body: + source: + remote: + host: http://badremote:9200 + index: test + dest: + index: dest + +--- +"badly formatted remote host fails": + - do: + catch: /\[host\] must be of the form \[scheme\].//\[host\].\[port\]/ + reindex: + body: + source: + remote: + host: badremote + weird: stuff + badkey: is bad + index: test + dest: + index: dest + +--- +"junk in remote fails": + - do: + catch: /Unsupported fields in \[remote\]. \[weird,badkey\]/ + reindex: + body: + source: + remote: + host: http://okremote:9200 + weird: stuff + badkey: is bad + index: test + dest: + index: dest diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/70_throttle.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/70_throttle.yaml index 73e1a3a3a94..05d7668ed2e 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/70_throttle.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/70_throttle.yaml @@ -156,7 +156,7 @@ task_id: $task - do: - task.get: + tasks.get: wait_for_completion: true task_id: $task @@ -214,6 +214,6 @@ task_id: $task - do: - task.get: + tasks.get: wait_for_completion: true task_id: $task diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/90_remote.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/90_remote.yaml new file mode 100644 index 00000000000..6adac98ad77 --- /dev/null +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/90_remote.yaml @@ -0,0 +1,207 @@ +--- +"Basic reindex from remote": + - do: + index: + index: source + type: foo + id: 1 + body: { "text": "test" } + refresh: true + + # Fetch the http host. We use the host of the master because we know there will always be a master. 
+ - do: + cluster.state: {} + - set: { master_node: master } + - do: + nodes.info: + metric: [ http ] + - is_true: nodes.$master.http.publish_address + - set: {nodes.$master.http.publish_address: host} + - do: + reindex: + refresh: true + body: + source: + remote: + host: http://${host} + index: source + dest: + index: dest + - match: {created: 1} + - match: {updated: 0} + - match: {version_conflicts: 0} + - match: {batches: 1} + - match: {failures: []} + - match: {throttled_millis: 0} + - gte: { took: 0 } + - is_false: task + - is_false: deleted + + - do: + search: + index: dest + body: + query: + match: + text: test + - match: {hits.total: 1} + +--- +"Reindex from remote with query": + - do: + index: + index: source + type: foo + id: 1 + body: { "text": "test" } + - do: + index: + index: source + type: foo + id: 2 + body: { "text": "test2" } + - do: + indices.refresh: {} + + # Fetch the http host. We use the host of the master because we know there will always be a master. + - do: + cluster.state: {} + - set: { master_node: master } + - do: + nodes.info: + metric: [ http ] + - is_true: nodes.$master.http.publish_address + - set: {nodes.$master.http.publish_address: host} + - do: + reindex: + refresh: true + body: + source: + remote: + host: http://${host} + index: source + query: + match: + text: test2 + dest: + index: dest + - match: {created: 1} + + - do: + search: + index: dest + body: + query: + match_all: {} + - match: {hits.total: 1} + +--- +"Reindex from remote with routing": + - do: + index: + index: source + type: foo + id: 1 + body: { "text": "test" } + routing: foo + refresh: true + + # Fetch the http host. We use the host of the master because we know there will always be a master. + - do: + cluster.state: {} + - set: { master_node: master } + - do: + nodes.info: + metric: [ http ] + - is_true: nodes.$master.http.publish_address + - set: {nodes.$master.http.publish_address: host} + - do: + reindex: + refresh: true + body: + source: + remote: + host: http://${host} + index: source + dest: + index: dest + - match: {created: 1} + + - do: + search: + index: dest + routing: foo + body: + query: + match: + text: test + - match: {hits.total: 1} + +--- +"Reindex from remote with parent/child": + - do: + indices.create: + index: source + body: + mappings: + foo: {} + bar: + _parent: + type: foo + - do: + indices.create: + index: dest + body: + mappings: + foo: {} + bar: + _parent: + type: foo + - do: + index: + index: source + type: foo + id: 1 + body: { "text": "test" } + - do: + index: + index: source + type: bar + id: 1 + parent: 1 + body: { "text": "test2" } + - do: + indices.refresh: {} + + # Fetch the http host. We use the host of the master because we know there will always be a master. 
+ - do: + cluster.state: {} + - set: { master_node: master } + - do: + nodes.info: + metric: [ http ] + - is_true: nodes.$master.http.publish_address + - set: {nodes.$master.http.publish_address: host} + - do: + reindex: + refresh: true + body: + source: + remote: + host: http://${host} + index: source + dest: + index: dest + - match: {created: 2} + + - do: + search: + index: dest + body: + query: + has_parent: + parent_type: foo + query: + match: + text: test + - match: {hits.total: 1} diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/10_basic.yaml index 62c8677921d..17b2dc77816 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/10_basic.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/10_basic.yaml @@ -53,7 +53,7 @@ - is_false: deleted - do: - task.get: + tasks.get: wait_for_completion: true task_id: $task - is_false: node_failures diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/40_versioning.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/40_versioning.yaml index ac1cbe4417e..1718714defd 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/40_versioning.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/40_versioning.yaml @@ -21,3 +21,30 @@ type: test id: 1 - match: {_version: 2} + +--- +"update_by_query fails to update documents with version number equal to zero": + - do: + index: + index: index1 + type: type1 + id: 1 + version: 0 # Starting version is zero + version_type: external + body: {"update": 0} + - do: + indices.refresh: {} + + # Update by query uses internal versioning and will fail here + # because zero is not allowed as a valid version number + - do: + catch: /illegal version value \[0\] for version type \[INTERNAL\]./ + update_by_query: + index: index1 + refresh: true + - do: + get: + index: index1 + type: type1 + id: 1 + - match: {_version: 0} diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/60_throttle.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/60_throttle.yaml index e13e29bc3f5..7ecf7000bfd 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/60_throttle.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/60_throttle.yaml @@ -122,7 +122,7 @@ task_id: $task - do: - task.get: + tasks.get: wait_for_completion: true task_id: $task @@ -172,6 +172,6 @@ task_id: $task - do: - task.get: + tasks.get: wait_for_completion: true task_id: $task diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java index e04724ee370..eac3ceebc16 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java @@ -40,9 +40,6 @@ import java.util.HashMap; import java.util.Map; import java.util.stream.Collectors; - -/** - */ public class IcuTokenizerFactory extends AbstractTokenizerFactory { private final ICUTokenizerConfig config; @@ -101,8 +98,8 @@ public class IcuTokenizerFactory extends AbstractTokenizerFactory { }; return config; } - } catch (Throwable t) { - throw new 
ElasticsearchException("failed to load ICU rule files", t); + } catch (Exception e) { + throw new ElasticsearchException("failed to load ICU rule files", e); } } diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/AnalysisICUPlugin.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/AnalysisICUPlugin.java index e0e6c3c2e36..059dabb4f46 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/AnalysisICUPlugin.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/plugin/analysis/icu/AnalysisICUPlugin.java @@ -19,26 +19,42 @@ package org.elasticsearch.plugin.analysis.icu; +import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.IcuCollationTokenFilterFactory; import org.elasticsearch.index.analysis.IcuFoldingTokenFilterFactory; import org.elasticsearch.index.analysis.IcuNormalizerCharFilterFactory; import org.elasticsearch.index.analysis.IcuNormalizerTokenFilterFactory; import org.elasticsearch.index.analysis.IcuTokenizerFactory; import org.elasticsearch.index.analysis.IcuTransformTokenFilterFactory; -import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.index.analysis.TokenizerFactory; +import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; +import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.plugins.Plugin; -public class AnalysisICUPlugin extends Plugin { +import java.util.HashMap; +import java.util.Map; - /** - * Automatically called with the analysis module. - */ - public void onModule(AnalysisModule module) { - module.registerCharFilter("icu_normalizer", IcuNormalizerCharFilterFactory::new); - module.registerTokenizer("icu_tokenizer", IcuTokenizerFactory::new); - module.registerTokenFilter("icu_normalizer", IcuNormalizerTokenFilterFactory::new); - module.registerTokenFilter("icu_folding", IcuFoldingTokenFilterFactory::new); - module.registerTokenFilter("icu_collation", IcuCollationTokenFilterFactory::new); - module.registerTokenFilter("icu_transform", IcuTransformTokenFilterFactory::new); +import static java.util.Collections.singletonMap; + +public class AnalysisICUPlugin extends Plugin implements AnalysisPlugin { + @Override + public Map> getCharFilters() { + return singletonMap("icu_normalizer", IcuNormalizerCharFilterFactory::new); + } + + @Override + public Map> getTokenFilters() { + Map> extra = new HashMap<>(); + extra.put("icu_normalizer", IcuNormalizerTokenFilterFactory::new); + extra.put("icu_folding", IcuFoldingTokenFilterFactory::new); + extra.put("icu_collation", IcuCollationTokenFilterFactory::new); + extra.put("icu_transform", IcuTransformTokenFilterFactory::new); + return extra; + } + + @Override + public Map> getTokenizers() { + return singletonMap("icu_tokenizer", IcuTokenizerFactory::new); } } diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IcuTokenizerFactoryTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IcuTokenizerFactoryTests.java index 1630d514ae3..180c4268612 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IcuTokenizerFactoryTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IcuTokenizerFactoryTests.java @@ -102,6 +102,6 @@ public class IcuTokenizerFactoryTests extends ESTestCase { .build(); Settings nodeSettings = 
Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), home).build(); - return createAnalysisService(new Index("test", "_na_"), nodeSettings, settings, new AnalysisICUPlugin()::onModule); + return createAnalysisService(new Index("test", "_na_"), nodeSettings, settings, new AnalysisICUPlugin()); } } diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java index b399dfd34f4..9255a250f16 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java @@ -31,8 +31,7 @@ import static org.hamcrest.Matchers.instanceOf; */ public class SimpleIcuAnalysisTests extends ESTestCase { public void testDefaultsIcuAnalysis() throws IOException { - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), - Settings.EMPTY, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), Settings.EMPTY, new AnalysisICUPlugin()); TokenizerFactory tokenizerFactory = analysisService.tokenizer("icu_tokenizer"); assertThat(tokenizerFactory, instanceOf(IcuTokenizerFactory.class)); diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java index 71752196af0..62e9c9db145 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java @@ -50,7 +50,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { .put("index.analysis.filter.myCollator.language", "tr") .put("index.analysis.filter.myCollator.strength", "primary") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollatesToSame(filterFactory, "I WİLL USE TURKİSH CASING", "ı will use turkish casıng"); @@ -66,7 +66,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { .put("index.analysis.filter.myCollator.strength", "primary") .put("index.analysis.filter.myCollator.decomposition", "canonical") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollatesToSame(filterFactory, "I W\u0049\u0307LL USE TURKİSH CASING", "ı will use turkish casıng"); @@ -82,7 +82,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { .put("index.analysis.filter.myCollator.strength", "secondary") .put("index.analysis.filter.myCollator.decomposition", "no") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = 
createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollatesToSame(filterFactory, "TESTING", "testing"); @@ -99,7 +99,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { .put("index.analysis.filter.myCollator.strength", "primary") .put("index.analysis.filter.myCollator.alternate", "shifted") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollatesToSame(filterFactory, "foo-bar", "foo bar"); @@ -117,7 +117,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { .put("index.analysis.filter.myCollator.alternate", "shifted") .put("index.analysis.filter.myCollator.variableTop", " ") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollatesToSame(filterFactory, "foo bar", "foobar"); @@ -135,7 +135,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { .put("index.analysis.filter.myCollator.language", "en") .put("index.analysis.filter.myCollator.numeric", "true") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollation(filterFactory, "foobar-9", "foobar-10", -1); @@ -152,7 +152,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { .put("index.analysis.filter.myCollator.strength", "primary") .put("index.analysis.filter.myCollator.caseLevel", "true") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollatesToSame(filterFactory, "résumé", "resume"); @@ -172,7 +172,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { .put("index.analysis.filter.myCollator.strength", "tertiary") .put("index.analysis.filter.myCollator.caseFirst", "upper") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollation(filterFactory, "Resume", "resume", -1); @@ -200,7 +200,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { .put("index.analysis.filter.myCollator.rules", tailoredRules) .put("index.analysis.filter.myCollator.strength", "primary") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", 
"_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); TokenFilterFactory filterFactory = analysisService.tokenFilter("myCollator"); assertCollatesToSame(filterFactory, "Töne", "Toene"); diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java index 96defd0e076..b82accf0cf8 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java @@ -37,7 +37,7 @@ public class SimpleIcuNormalizerCharFilterTests extends ESTestCase { Settings settings = Settings.builder() .put("index.analysis.char_filter.myNormalizerChar.type", "icu_normalizer") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); CharFilterFactory charFilterFactory = analysisService.charFilter("myNormalizerChar"); String input = "ʰ㌰゙5℃№㈱㌘,バッファーの正規化のテスト.㋐㋑㋒㋓㋔カキクケコザジズゼゾg̈각/각நிเกषिchkʷक्षि"; @@ -61,7 +61,7 @@ public class SimpleIcuNormalizerCharFilterTests extends ESTestCase { .put("index.analysis.char_filter.myNormalizerChar.name", "nfkc") .put("index.analysis.char_filter.myNormalizerChar.mode", "decompose") .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisICUPlugin()); CharFilterFactory charFilterFactory = analysisService.charFilter("myNormalizerChar"); String input = "ʰ㌰゙5℃№㈱㌘,バッファーの正規化のテスト.㋐㋑㋒㋓㋔カキクケコザジズゼゾg̈각/각நிเกषिchkʷक्षि"; diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/AnalysisKuromojiPlugin.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/AnalysisKuromojiPlugin.java index e92e782a96c..c4b4db53c4a 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/AnalysisKuromojiPlugin.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/AnalysisKuromojiPlugin.java @@ -19,6 +19,9 @@ package org.elasticsearch.plugin.analysis.kuromoji; +import org.apache.lucene.analysis.Analyzer; +import org.elasticsearch.index.analysis.AnalyzerProvider; +import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.JapaneseStopTokenFilterFactory; import org.elasticsearch.index.analysis.KuromojiAnalyzerProvider; import org.elasticsearch.index.analysis.KuromojiBaseFormFilterFactory; @@ -28,23 +31,42 @@ import org.elasticsearch.index.analysis.KuromojiNumberFilterFactory; import org.elasticsearch.index.analysis.KuromojiPartOfSpeechFilterFactory; import org.elasticsearch.index.analysis.KuromojiReadingFormFilterFactory; import org.elasticsearch.index.analysis.KuromojiTokenizerFactory; -import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.index.analysis.TokenizerFactory; +import 
org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
+import org.elasticsearch.plugins.AnalysisPlugin;
 import org.elasticsearch.plugins.Plugin;
 
-/**
- *
- */
-public class AnalysisKuromojiPlugin extends Plugin {
+import java.util.HashMap;
+import java.util.Map;
 
-    public void onModule(AnalysisModule module) {
-        module.registerCharFilter("kuromoji_iteration_mark", KuromojiIterationMarkCharFilterFactory::new);
-        module.registerAnalyzer("kuromoji", KuromojiAnalyzerProvider::new);
-        module.registerTokenizer("kuromoji_tokenizer", KuromojiTokenizerFactory::new);
-        module.registerTokenFilter("kuromoji_baseform", KuromojiBaseFormFilterFactory::new);
-        module.registerTokenFilter("kuromoji_part_of_speech", KuromojiPartOfSpeechFilterFactory::new);
-        module.registerTokenFilter("kuromoji_readingform", KuromojiReadingFormFilterFactory::new);
-        module.registerTokenFilter("kuromoji_stemmer", KuromojiKatakanaStemmerFactory::new);
-        module.registerTokenFilter("ja_stop", JapaneseStopTokenFilterFactory::new);
-        module.registerTokenFilter("kuromoji_number", KuromojiNumberFilterFactory::new);
+import static java.util.Collections.singletonMap;
+
+public class AnalysisKuromojiPlugin extends Plugin implements AnalysisPlugin {
+    @Override
+    public Map<String, AnalysisProvider<CharFilterFactory>> getCharFilters() {
+        return singletonMap("kuromoji_iteration_mark", KuromojiIterationMarkCharFilterFactory::new);
+    }
+
+    @Override
+    public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
+        Map<String, AnalysisProvider<TokenFilterFactory>> extra = new HashMap<>();
+        extra.put("kuromoji_baseform", KuromojiBaseFormFilterFactory::new);
+        extra.put("kuromoji_part_of_speech", KuromojiPartOfSpeechFilterFactory::new);
+        extra.put("kuromoji_readingform", KuromojiReadingFormFilterFactory::new);
+        extra.put("kuromoji_stemmer", KuromojiKatakanaStemmerFactory::new);
+        extra.put("ja_stop", JapaneseStopTokenFilterFactory::new);
+        extra.put("kuromoji_number", KuromojiNumberFilterFactory::new);
+        return extra;
+    }
+
+    @Override
+    public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
+        return singletonMap("kuromoji_tokenizer", KuromojiTokenizerFactory::new);
+    }
+
+    @Override
+    public Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() {
+        return singletonMap("kuromoji", KuromojiAnalyzerProvider::new);
     }
 }
diff --git a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java
index 540e11250d0..53196ac7462 100644
--- a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java
+++ b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java
@@ -198,7 +198,7 @@ public class KuromojiAnalysisTests extends ESTestCase {
             .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
             .build();
         Settings nodeSettings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), home).build();
-        return createAnalysisService(new Index("test", "_na_"), nodeSettings, settings, new AnalysisKuromojiPlugin()::onModule);
+        return createAnalysisService(new Index("test", "_na_"), nodeSettings, settings, new AnalysisKuromojiPlugin());
     }
 
     public static void assertSimpleTSOutput(TokenStream stream,
diff --git a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/phonetic/HaasePhonetik.java b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/phonetic/HaasePhonetik.java
index 728a9354d97..e8b49a50edb 100644
--- a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/phonetic/HaasePhonetik.java
+++ b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/phonetic/HaasePhonetik.java
b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/phonetic/HaasePhonetik.java @@ -37,9 +37,9 @@ package org.elasticsearch.index.analysis.phonetic; */ public class HaasePhonetik extends KoelnerPhonetik { - private final static String[] HAASE_VARIATIONS_PATTERNS = {"OWN", "RB", "WSK", "A$", "O$", "SCH", + private static final String[] HAASE_VARIATIONS_PATTERNS = {"OWN", "RB", "WSK", "A$", "O$", "SCH", "GLI", "EAU$", "^CH", "AUX", "EUX", "ILLE"}; - private final static String[] HAASE_VARIATIONS_REPLACEMENTS = {"AUN", "RW", "RSK", "AR", "OW", "CH", + private static final String[] HAASE_VARIATIONS_REPLACEMENTS = {"AUN", "RW", "RSK", "AR", "OW", "CH", "LI", "O", "SCH", "O", "O", "I"}; @Override diff --git a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/plugin/analysis/AnalysisPhoneticPlugin.java b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/plugin/analysis/AnalysisPhoneticPlugin.java index 3be97b444db..c7355e976ce 100644 --- a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/plugin/analysis/AnalysisPhoneticPlugin.java +++ b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/plugin/analysis/AnalysisPhoneticPlugin.java @@ -20,13 +20,19 @@ package org.elasticsearch.plugin.analysis; import org.elasticsearch.index.analysis.PhoneticTokenFilterFactory; -import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; +import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.plugins.Plugin; -public class AnalysisPhoneticPlugin extends Plugin { +import java.util.Map; - public void onModule(AnalysisModule module) { - module.registerTokenFilter("phonetic", PhoneticTokenFilterFactory::new); +import static java.util.Collections.singletonMap; + +public class AnalysisPhoneticPlugin extends Plugin implements AnalysisPlugin { + @Override + public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() { + return singletonMap("phonetic", PhoneticTokenFilterFactory::new); } } diff --git a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java index 18e49fa6e51..3dcfadce781 100644 --- a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java +++ b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java @@ -39,8 +39,7 @@ public class SimplePhoneticAnalysisTests extends ESTestCase { Settings settings = Settings.builder().loadFromStream(yaml, getClass().getResourceAsStream(yaml)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, - new AnalysisPhoneticPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings, new AnalysisPhoneticPlugin()); TokenFilterFactory filterFactory = analysisService.tokenFilter("phonetic"); MatcherAssert.assertThat(filterFactory, instanceOf(PhoneticTokenFilterFactory.class)); } diff --git a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/AnalysisSmartChinesePlugin.java b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/AnalysisSmartChinesePlugin.java index 05c779bb5a9..b11a157c149 100644 ---
a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/AnalysisSmartChinesePlugin.java +++ b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/plugin/analysis/smartcn/AnalysisSmartChinesePlugin.java @@ -19,20 +19,40 @@ package org.elasticsearch.plugin.analysis.smartcn; +import org.apache.lucene.analysis.Analyzer; +import org.elasticsearch.index.analysis.AnalyzerProvider; import org.elasticsearch.index.analysis.SmartChineseAnalyzerProvider; import org.elasticsearch.index.analysis.SmartChineseNoOpTokenFilterFactory; import org.elasticsearch.index.analysis.SmartChineseTokenizerTokenizerFactory; -import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.index.analysis.TokenizerFactory; +import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; +import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.plugins.Plugin; -public class AnalysisSmartChinesePlugin extends Plugin { +import java.util.HashMap; +import java.util.Map; - public void onModule(AnalysisModule module) { - module.registerAnalyzer("smartcn", SmartChineseAnalyzerProvider::new); - module.registerTokenizer("smartcn_tokenizer", SmartChineseTokenizerTokenizerFactory::new); - // This is an alias to "smartcn_tokenizer"; it's here for backwards compat - module.registerTokenizer("smartcn_sentence", SmartChineseTokenizerTokenizerFactory::new); - // This is a noop token filter; it's here for backwards compat before we had "smartcn_tokenizer" - module.registerTokenFilter("smartcn_word", SmartChineseNoOpTokenFilterFactory::new); +import static java.util.Collections.singletonMap; + +public class AnalysisSmartChinesePlugin extends Plugin implements AnalysisPlugin { + @Override + public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() { + // This is a noop token filter; it's here for backwards compat before we had "smartcn_tokenizer" + return singletonMap("smartcn_word", SmartChineseNoOpTokenFilterFactory::new); + } + + @Override + public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() { + Map<String, AnalysisProvider<TokenizerFactory>> extra = new HashMap<>(); + extra.put("smartcn_tokenizer", SmartChineseTokenizerTokenizerFactory::new); + // This is an alias to "smartcn_tokenizer"; it's here for backwards compat + extra.put("smartcn_sentence", SmartChineseTokenizerTokenizerFactory::new); + return extra; + } + + @Override + public Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() { + return singletonMap("smartcn", SmartChineseAnalyzerProvider::new); } } diff --git a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java index 0fcc42643d4..08aebdee2bb 100644 --- a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java +++ b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java @@ -29,11 +29,10 @@ import java.io.IOException; import static org.hamcrest.Matchers.instanceOf; -/** - */ public class SimpleSmartChineseAnalysisTests extends ESTestCase { public void testDefaultsIcuAnalysis() throws IOException { - final AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), Settings.EMPTY, new AnalysisSmartChinesePlugin()::onModule); + final AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), Settings.EMPTY, + new AnalysisSmartChinesePlugin()); TokenizerFactory tokenizerFactory =
analysisService.tokenizer("smartcn_tokenizer"); MatcherAssert.assertThat(tokenizerFactory, instanceOf(SmartChineseTokenizerTokenizerFactory.class)); } diff --git a/plugins/analysis-stempel/src/main/java/org/elasticsearch/plugin/analysis/stempel/AnalysisStempelPlugin.java b/plugins/analysis-stempel/src/main/java/org/elasticsearch/plugin/analysis/stempel/AnalysisStempelPlugin.java index 8549795f4b6..98dd9634fb9 100644 --- a/plugins/analysis-stempel/src/main/java/org/elasticsearch/plugin/analysis/stempel/AnalysisStempelPlugin.java +++ b/plugins/analysis-stempel/src/main/java/org/elasticsearch/plugin/analysis/stempel/AnalysisStempelPlugin.java @@ -19,15 +19,27 @@ package org.elasticsearch.plugin.analysis.stempel; +import org.apache.lucene.analysis.Analyzer; +import org.elasticsearch.index.analysis.AnalyzerProvider; +import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.pl.PolishAnalyzerProvider; import org.elasticsearch.index.analysis.pl.PolishStemTokenFilterFactory; -import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; +import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.plugins.Plugin; -public class AnalysisStempelPlugin extends Plugin { +import java.util.Map; - public void onModule(AnalysisModule module) { - module.registerAnalyzer("polish", PolishAnalyzerProvider::new); - module.registerTokenFilter("polish_stem", PolishStemTokenFilterFactory::new); +import static java.util.Collections.singletonMap; + +public class AnalysisStempelPlugin extends Plugin implements AnalysisPlugin { + @Override + public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() { + return singletonMap("polish_stem", PolishStemTokenFilterFactory::new); + } + + @Override + public Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() { + return singletonMap("polish", PolishAnalyzerProvider::new); } } diff --git a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java index 9bfcc2c2f3f..4f7ee642ebd 100644 --- a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java +++ b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java @@ -36,7 +36,8 @@ import static org.hamcrest.Matchers.instanceOf; */ public class PolishAnalysisTests extends ESTestCase { public void testDefaultsPolishAnalysis() throws IOException { - final AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), Settings.EMPTY, new AnalysisStempelPlugin()::onModule); + final AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), Settings.EMPTY, + new AnalysisStempelPlugin()); TokenFilterFactory tokenizerFactory = analysisService.tokenFilter("polish_stem"); MatcherAssert.assertThat(tokenizerFactory, instanceOf(PolishStemTokenFilterFactory.class)); diff --git a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java index 9458b6920c4..3fc12ccdfed 100644 --- a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java +++ b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java @@ -49,7 +49,7 @@ public class SimplePolishTokenFilterTests extends ESTestCase {
Settings settings = Settings.builder() .put("index.analysis.filter.myStemmer.type", "polish_stem") .build(); - AnalysisService analysisService = createAnalysisService(index, settings, new AnalysisStempelPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(index, settings, new AnalysisStempelPlugin()); TokenFilterFactory filterFactory = analysisService.tokenFilter("myStemmer"); @@ -65,8 +65,7 @@ public class SimplePolishTokenFilterTests extends ESTestCase { } private void testAnalyzer(String source, String... expected_terms) throws IOException { - AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), Settings.EMPTY, - new AnalysisStempelPlugin()::onModule); + AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), Settings.EMPTY, new AnalysisStempelPlugin()); Analyzer analyzer = analysisService.analyzer("polish").analyzer(); diff --git a/plugins/discovery-azure/build.gradle b/plugins/discovery-azure-classic/build.gradle similarity index 97% rename from plugins/discovery-azure/build.gradle rename to plugins/discovery-azure-classic/build.gradle index 8f0c641e150..88874968b21 100644 --- a/plugins/discovery-azure/build.gradle +++ b/plugins/discovery-azure-classic/build.gradle @@ -20,8 +20,8 @@ import org.elasticsearch.gradle.LoggedExec */ esplugin { - description 'The Azure Discovery plugin allows to use Azure API for the unicast discovery mechanism' - classname 'org.elasticsearch.plugin.discovery.azure.AzureDiscoveryPlugin' + description 'The Azure Classic Discovery plugin allows to use Azure Classic API for the unicast discovery mechanism' + classname 'org.elasticsearch.plugin.discovery.azure.classic.AzureDiscoveryPlugin' } versions << [ diff --git a/plugins/discovery-azure/licenses/azure-LICENSE.txt b/plugins/discovery-azure-classic/licenses/azure-LICENSE.txt similarity index 100% rename from plugins/discovery-azure/licenses/azure-LICENSE.txt rename to plugins/discovery-azure-classic/licenses/azure-LICENSE.txt diff --git a/plugins/discovery-azure/licenses/azure-NOTICE.txt b/plugins/discovery-azure-classic/licenses/azure-NOTICE.txt similarity index 100% rename from plugins/discovery-azure/licenses/azure-NOTICE.txt rename to plugins/discovery-azure-classic/licenses/azure-NOTICE.txt diff --git a/plugins/discovery-azure/licenses/azure-core-0.9.3.jar.sha1 b/plugins/discovery-azure-classic/licenses/azure-core-0.9.3.jar.sha1 similarity index 100% rename from plugins/discovery-azure/licenses/azure-core-0.9.3.jar.sha1 rename to plugins/discovery-azure-classic/licenses/azure-core-0.9.3.jar.sha1 diff --git a/plugins/discovery-azure/licenses/azure-svc-mgmt-compute-0.9.3.jar.sha1 b/plugins/discovery-azure-classic/licenses/azure-svc-mgmt-compute-0.9.3.jar.sha1 similarity index 100% rename from plugins/discovery-azure/licenses/azure-svc-mgmt-compute-0.9.3.jar.sha1 rename to plugins/discovery-azure-classic/licenses/azure-svc-mgmt-compute-0.9.3.jar.sha1 diff --git a/plugins/discovery-azure-classic/licenses/commons-codec-1.10.jar.sha1 b/plugins/discovery-azure-classic/licenses/commons-codec-1.10.jar.sha1 new file mode 100644 index 00000000000..3fe8682a1b0 --- /dev/null +++ b/plugins/discovery-azure-classic/licenses/commons-codec-1.10.jar.sha1 @@ -0,0 +1 @@ +4b95f4897fa13f2cd904aee711aeafc0c5295cd8 \ No newline at end of file diff --git a/plugins/discovery-azure/LICENSE.txt b/plugins/discovery-azure-classic/licenses/commons-codec-LICENSE.txt similarity index 100% rename from plugins/discovery-azure/LICENSE.txt rename to 
plugins/discovery-azure-classic/licenses/commons-codec-LICENSE.txt diff --git a/plugins/discovery-azure-classic/licenses/commons-codec-NOTICE.txt b/plugins/discovery-azure-classic/licenses/commons-codec-NOTICE.txt new file mode 100644 index 00000000000..56916449bbe --- /dev/null +++ b/plugins/discovery-azure-classic/licenses/commons-codec-NOTICE.txt @@ -0,0 +1,17 @@ +Apache Commons Codec +Copyright 2002-2015 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + +src/test/org/apache/commons/codec/language/DoubleMetaphoneTest.java +contains test data from http://aspell.net/test/orig/batch0.tab. +Copyright (C) 2002 Kevin Atkinson (kevina@gnu.org) + +=============================================================================== + +The content of package org.apache.commons.codec.language.bm has been translated +from the original php source code available at http://stevemorse.org/phoneticinfo.htm +with permission from the original authors. +Original source copyright: +Copyright (c) 2008 Alexander Beider & Stephen P. Morse. diff --git a/plugins/discovery-azure/licenses/commons-io-2.4.jar.sha1 b/plugins/discovery-azure-classic/licenses/commons-io-2.4.jar.sha1 similarity index 100% rename from plugins/discovery-azure/licenses/commons-io-2.4.jar.sha1 rename to plugins/discovery-azure-classic/licenses/commons-io-2.4.jar.sha1 diff --git a/plugins/discovery-azure/licenses/commons-io-LICENSE.txt b/plugins/discovery-azure-classic/licenses/commons-io-LICENSE.txt similarity index 100% rename from plugins/discovery-azure/licenses/commons-io-LICENSE.txt rename to plugins/discovery-azure-classic/licenses/commons-io-LICENSE.txt diff --git a/plugins/discovery-azure/licenses/commons-io-NOTICE.txt b/plugins/discovery-azure-classic/licenses/commons-io-NOTICE.txt similarity index 100% rename from plugins/discovery-azure/licenses/commons-io-NOTICE.txt rename to plugins/discovery-azure-classic/licenses/commons-io-NOTICE.txt diff --git a/plugins/discovery-azure/licenses/commons-lang-2.6.jar.sha1 b/plugins/discovery-azure-classic/licenses/commons-lang-2.6.jar.sha1 similarity index 100% rename from plugins/discovery-azure/licenses/commons-lang-2.6.jar.sha1 rename to plugins/discovery-azure-classic/licenses/commons-lang-2.6.jar.sha1 diff --git a/plugins/discovery-azure/licenses/commons-lang-LICENSE.txt b/plugins/discovery-azure-classic/licenses/commons-lang-LICENSE.txt similarity index 100% rename from plugins/discovery-azure/licenses/commons-lang-LICENSE.txt rename to plugins/discovery-azure-classic/licenses/commons-lang-LICENSE.txt diff --git a/plugins/discovery-azure/licenses/commons-lang-NOTICE.txt b/plugins/discovery-azure-classic/licenses/commons-lang-NOTICE.txt similarity index 100% rename from plugins/discovery-azure/licenses/commons-lang-NOTICE.txt rename to plugins/discovery-azure-classic/licenses/commons-lang-NOTICE.txt diff --git a/plugins/discovery-azure-classic/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/discovery-azure-classic/licenses/commons-logging-1.1.3.jar.sha1 new file mode 100644 index 00000000000..c8756c43832 --- /dev/null +++ b/plugins/discovery-azure-classic/licenses/commons-logging-1.1.3.jar.sha1 @@ -0,0 +1 @@ +f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f diff --git a/plugins/discovery-azure/licenses/commons-logging-LICENSE.txt b/plugins/discovery-azure-classic/licenses/commons-logging-LICENSE.txt similarity index 100% rename from plugins/discovery-azure/licenses/commons-logging-LICENSE.txt rename to 
plugins/discovery-azure-classic/licenses/commons-logging-LICENSE.txt diff --git a/plugins/discovery-azure/licenses/commons-logging-NOTICE.txt b/plugins/discovery-azure-classic/licenses/commons-logging-NOTICE.txt similarity index 100% rename from plugins/discovery-azure/licenses/commons-logging-NOTICE.txt rename to plugins/discovery-azure-classic/licenses/commons-logging-NOTICE.txt diff --git a/plugins/discovery-azure-classic/licenses/httpclient-4.5.2.jar.sha1 b/plugins/discovery-azure-classic/licenses/httpclient-4.5.2.jar.sha1 new file mode 100644 index 00000000000..6937112a09f --- /dev/null +++ b/plugins/discovery-azure-classic/licenses/httpclient-4.5.2.jar.sha1 @@ -0,0 +1 @@ +733db77aa8d9b2d68015189df76ab06304406e50 \ No newline at end of file diff --git a/plugins/discovery-azure-classic/licenses/httpclient-LICENSE.txt b/plugins/discovery-azure-classic/licenses/httpclient-LICENSE.txt new file mode 100644 index 00000000000..32f01eda18f --- /dev/null +++ b/plugins/discovery-azure-classic/licenses/httpclient-LICENSE.txt @@ -0,0 +1,558 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + +========================================================================= + +This project includes Public Suffix List copied from +<https://publicsuffix.org/list/effective_tld_names.dat> +licensed under the terms of the Mozilla Public License, v. 2.0 + +Full license text: + +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11.
"Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. 
+ +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. 
Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. 
Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. diff --git a/plugins/discovery-azure-classic/licenses/httpclient-NOTICE.txt b/plugins/discovery-azure-classic/licenses/httpclient-NOTICE.txt new file mode 100644 index 00000000000..4f6058178b2 --- /dev/null +++ b/plugins/discovery-azure-classic/licenses/httpclient-NOTICE.txt @@ -0,0 +1,5 @@ +Apache HttpComponents Client +Copyright 1999-2015 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). diff --git a/plugins/discovery-azure-classic/licenses/httpcore-4.4.4.jar.sha1 b/plugins/discovery-azure-classic/licenses/httpcore-4.4.4.jar.sha1 new file mode 100644 index 00000000000..ef0c257e012 --- /dev/null +++ b/plugins/discovery-azure-classic/licenses/httpcore-4.4.4.jar.sha1 @@ -0,0 +1 @@ +b31526a230871fbe285fbcbe2813f9c0839ae9b0 \ No newline at end of file diff --git a/plugins/discovery-azure-classic/licenses/httpcore-LICENSE.txt b/plugins/discovery-azure-classic/licenses/httpcore-LICENSE.txt new file mode 100644 index 00000000000..72819a9f06f --- /dev/null +++ b/plugins/discovery-azure-classic/licenses/httpcore-LICENSE.txt @@ -0,0 +1,241 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + +========================================================================= + +This project contains annotations in the package org.apache.http.annotation +which are derived from JCIP-ANNOTATIONS +Copyright (c) 2005 Brian Goetz and Tim Peierls. +See http://www.jcip.net and the Creative Commons Attribution License +(http://creativecommons.org/licenses/by/2.5) +Full text: http://creativecommons.org/licenses/by/2.5/legalcode + +License + +THE WORK (AS DEFINED BELOW) IS PROVIDED UNDER THE TERMS OF THIS CREATIVE COMMONS PUBLIC LICENSE ("CCPL" OR "LICENSE"). THE WORK IS PROTECTED BY COPYRIGHT AND/OR OTHER APPLICABLE LAW. ANY USE OF THE WORK OTHER THAN AS AUTHORIZED UNDER THIS LICENSE OR COPYRIGHT LAW IS PROHIBITED. + +BY EXERCISING ANY RIGHTS TO THE WORK PROVIDED HERE, YOU ACCEPT AND AGREE TO BE BOUND BY THE TERMS OF THIS LICENSE. THE LICENSOR GRANTS YOU THE RIGHTS CONTAINED HERE IN CONSIDERATION OF YOUR ACCEPTANCE OF SUCH TERMS AND CONDITIONS. + +1. Definitions + + "Collective Work" means a work, such as a periodical issue, anthology or encyclopedia, in which the Work in its entirety in unmodified form, along with a number of other contributions, constituting separate and independent works in themselves, are assembled into a collective whole. A work that constitutes a Collective Work will not be considered a Derivative Work (as defined below) for the purposes of this License. + "Derivative Work" means a work based upon the Work or upon the Work and other pre-existing works, such as a translation, musical arrangement, dramatization, fictionalization, motion picture version, sound recording, art reproduction, abridgment, condensation, or any other form in which the Work may be recast, transformed, or adapted, except that a work that constitutes a Collective Work will not be considered a Derivative Work for the purpose of this License. For the avoidance of doubt, where the Work is a musical composition or sound recording, the synchronization of the Work in timed-relation with a moving image ("synching") will be considered a Derivative Work for the purpose of this License. 
+ "Licensor" means the individual or entity that offers the Work under the terms of this License. + "Original Author" means the individual or entity who created the Work. + "Work" means the copyrightable work of authorship offered under the terms of this License. + "You" means an individual or entity exercising rights under this License who has not previously violated the terms of this License with respect to the Work, or who has received express permission from the Licensor to exercise rights under this License despite a previous violation. + +2. Fair Use Rights. Nothing in this license is intended to reduce, limit, or restrict any rights arising from fair use, first sale or other limitations on the exclusive rights of the copyright owner under copyright law or other applicable laws. + +3. License Grant. Subject to the terms and conditions of this License, Licensor hereby grants You a worldwide, royalty-free, non-exclusive, perpetual (for the duration of the applicable copyright) license to exercise the rights in the Work as stated below: + + to reproduce the Work, to incorporate the Work into one or more Collective Works, and to reproduce the Work as incorporated in the Collective Works; + to create and reproduce Derivative Works; + to distribute copies or phonorecords of, display publicly, perform publicly, and perform publicly by means of a digital audio transmission the Work including as incorporated in Collective Works; + to distribute copies or phonorecords of, display publicly, perform publicly, and perform publicly by means of a digital audio transmission Derivative Works. + + For the avoidance of doubt, where the work is a musical composition: + Performance Royalties Under Blanket Licenses. Licensor waives the exclusive right to collect, whether individually or via a performance rights society (e.g. ASCAP, BMI, SESAC), royalties for the public performance or public digital performance (e.g. webcast) of the Work. + Mechanical Rights and Statutory Royalties. Licensor waives the exclusive right to collect, whether individually or via a music rights agency or designated agent (e.g. Harry Fox Agency), royalties for any phonorecord You create from the Work ("cover version") and distribute, subject to the compulsory license created by 17 USC Section 115 of the US Copyright Act (or the equivalent in other jurisdictions). + Webcasting Rights and Statutory Royalties. For the avoidance of doubt, where the Work is a sound recording, Licensor waives the exclusive right to collect, whether individually or via a performance-rights society (e.g. SoundExchange), royalties for the public digital performance (e.g. webcast) of the Work, subject to the compulsory license created by 17 USC Section 114 of the US Copyright Act (or the equivalent in other jurisdictions). + +The above rights may be exercised in all media and formats whether now known or hereafter devised. The above rights include the right to make such modifications as are technically necessary to exercise the rights in other media and formats. All rights not expressly granted by Licensor are hereby reserved. + +4. 
Restrictions.The license granted in Section 3 above is expressly made subject to and limited by the following restrictions: + + You may distribute, publicly display, publicly perform, or publicly digitally perform the Work only under the terms of this License, and You must include a copy of, or the Uniform Resource Identifier for, this License with every copy or phonorecord of the Work You distribute, publicly display, publicly perform, or publicly digitally perform. You may not offer or impose any terms on the Work that alter or restrict the terms of this License or the recipients' exercise of the rights granted hereunder. You may not sublicense the Work. You must keep intact all notices that refer to this License and to the disclaimer of warranties. You may not distribute, publicly display, publicly perform, or publicly digitally perform the Work with any technological measures that control access or use of the Work in a manner inconsistent with the terms of this License Agreement. The above applies to the Work as incorporated in a Collective Work, but this does not require the Collective Work apart from the Work itself to be made subject to the terms of this License. If You create a Collective Work, upon notice from any Licensor You must, to the extent practicable, remove from the Collective Work any credit as required by clause 4(b), as requested. If You create a Derivative Work, upon notice from any Licensor You must, to the extent practicable, remove from the Derivative Work any credit as required by clause 4(b), as requested. + If you distribute, publicly display, publicly perform, or publicly digitally perform the Work or any Derivative Works or Collective Works, You must keep intact all copyright notices for the Work and provide, reasonable to the medium or means You are utilizing: (i) the name of the Original Author (or pseudonym, if applicable) if supplied, and/or (ii) if the Original Author and/or Licensor designate another party or parties (e.g. a sponsor institute, publishing entity, journal) for attribution in Licensor's copyright notice, terms of service or by other reasonable means, the name of such party or parties; the title of the Work if supplied; to the extent reasonably practicable, the Uniform Resource Identifier, if any, that Licensor specifies to be associated with the Work, unless such URI does not refer to the copyright notice or licensing information for the Work; and in the case of a Derivative Work, a credit identifying the use of the Work in the Derivative Work (e.g., "French translation of the Work by Original Author," or "Screenplay based on original Work by Original Author"). Such credit may be implemented in any reasonable manner; provided, however, that in the case of a Derivative Work or Collective Work, at a minimum such credit will appear where any other comparable authorship credit appears and in a manner at least as prominent as such other comparable authorship credit. + +5. Representations, Warranties and Disclaimer + +UNLESS OTHERWISE MUTUALLY AGREED TO BY THE PARTIES IN WRITING, LICENSOR OFFERS THE WORK AS-IS AND MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE WORK, EXPRESS, IMPLIED, STATUTORY OR OTHERWISE, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF TITLE, MERCHANTIBILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, ACCURACY, OR THE PRESENCE OF ABSENCE OF ERRORS, WHETHER OR NOT DISCOVERABLE. 
SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OF IMPLIED WARRANTIES, SO SUCH EXCLUSION MAY NOT APPLY TO YOU. + +6. Limitation on Liability. EXCEPT TO THE EXTENT REQUIRED BY APPLICABLE LAW, IN NO EVENT WILL LICENSOR BE LIABLE TO YOU ON ANY LEGAL THEORY FOR ANY SPECIAL, INCIDENTAL, CONSEQUENTIAL, PUNITIVE OR EXEMPLARY DAMAGES ARISING OUT OF THIS LICENSE OR THE USE OF THE WORK, EVEN IF LICENSOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +7. Termination + + This License and the rights granted hereunder will terminate automatically upon any breach by You of the terms of this License. Individuals or entities who have received Derivative Works or Collective Works from You under this License, however, will not have their licenses terminated provided such individuals or entities remain in full compliance with those licenses. Sections 1, 2, 5, 6, 7, and 8 will survive any termination of this License. + Subject to the above terms and conditions, the license granted here is perpetual (for the duration of the applicable copyright in the Work). Notwithstanding the above, Licensor reserves the right to release the Work under different license terms or to stop distributing the Work at any time; provided, however that any such election will not serve to withdraw this License (or any other license that has been, or is required to be, granted under the terms of this License), and this License will continue in full force and effect unless terminated as stated above. + +8. Miscellaneous + + Each time You distribute or publicly digitally perform the Work or a Collective Work, the Licensor offers to the recipient a license to the Work on the same terms and conditions as the license granted to You under this License. + Each time You distribute or publicly digitally perform a Derivative Work, Licensor offers to the recipient a license to the original Work on the same terms and conditions as the license granted to You under this License. + If any provision of this License is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this License, and without further action by the parties to this agreement, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. + No term or provision of this License shall be deemed waived and no breach consented to unless such waiver or consent shall be in writing and signed by the party to be charged with such waiver or consent. + This License constitutes the entire agreement between the parties with respect to the Work licensed here. There are no understandings, agreements or representations with respect to the Work not specified here. Licensor shall not be bound by any additional provisions that may appear in any communication from You. This License may not be modified without the mutual written agreement of the Licensor and You. diff --git a/plugins/discovery-azure-classic/licenses/httpcore-NOTICE.txt b/plugins/discovery-azure-classic/licenses/httpcore-NOTICE.txt new file mode 100644 index 00000000000..c0be50a505e --- /dev/null +++ b/plugins/discovery-azure-classic/licenses/httpcore-NOTICE.txt @@ -0,0 +1,8 @@ +Apache HttpComponents Core +Copyright 2005-2014 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + +This project contains annotations derived from JCIP-ANNOTATIONS +Copyright (c) 2005 Brian Goetz and Tim Peierls. 
See http://www.jcip.net diff --git a/plugins/discovery-azure/licenses/jackson-LICENSE b/plugins/discovery-azure-classic/licenses/jackson-LICENSE similarity index 100% rename from plugins/discovery-azure/licenses/jackson-LICENSE rename to plugins/discovery-azure-classic/licenses/jackson-LICENSE diff --git a/plugins/discovery-azure/licenses/jackson-NOTICE b/plugins/discovery-azure-classic/licenses/jackson-NOTICE similarity index 100% rename from plugins/discovery-azure/licenses/jackson-NOTICE rename to plugins/discovery-azure-classic/licenses/jackson-NOTICE diff --git a/plugins/discovery-azure/licenses/jackson-core-asl-1.9.2.jar.sha1 b/plugins/discovery-azure-classic/licenses/jackson-core-asl-1.9.2.jar.sha1 similarity index 100% rename from plugins/discovery-azure/licenses/jackson-core-asl-1.9.2.jar.sha1 rename to plugins/discovery-azure-classic/licenses/jackson-core-asl-1.9.2.jar.sha1 diff --git a/plugins/discovery-azure/licenses/jackson-jaxrs-1.9.2.jar.sha1 b/plugins/discovery-azure-classic/licenses/jackson-jaxrs-1.9.2.jar.sha1 similarity index 100% rename from plugins/discovery-azure/licenses/jackson-jaxrs-1.9.2.jar.sha1 rename to plugins/discovery-azure-classic/licenses/jackson-jaxrs-1.9.2.jar.sha1 diff --git a/plugins/discovery-azure/licenses/jackson-mapper-asl-1.9.2.jar.sha1 b/plugins/discovery-azure-classic/licenses/jackson-mapper-asl-1.9.2.jar.sha1 similarity index 100% rename from plugins/discovery-azure/licenses/jackson-mapper-asl-1.9.2.jar.sha1 rename to plugins/discovery-azure-classic/licenses/jackson-mapper-asl-1.9.2.jar.sha1 diff --git a/plugins/discovery-azure/licenses/jackson-xc-1.9.2.jar.sha1 b/plugins/discovery-azure-classic/licenses/jackson-xc-1.9.2.jar.sha1 similarity index 100% rename from plugins/discovery-azure/licenses/jackson-xc-1.9.2.jar.sha1 rename to plugins/discovery-azure-classic/licenses/jackson-xc-1.9.2.jar.sha1 diff --git a/plugins/discovery-azure/licenses/javax.inject-1.jar.sha1 b/plugins/discovery-azure-classic/licenses/javax.inject-1.jar.sha1 similarity index 100% rename from plugins/discovery-azure/licenses/javax.inject-1.jar.sha1 rename to plugins/discovery-azure-classic/licenses/javax.inject-1.jar.sha1 diff --git a/plugins/discovery-azure/licenses/javax.inject-LICENSE.txt b/plugins/discovery-azure-classic/licenses/javax.inject-LICENSE.txt similarity index 100% rename from plugins/discovery-azure/licenses/javax.inject-LICENSE.txt rename to plugins/discovery-azure-classic/licenses/javax.inject-LICENSE.txt diff --git a/plugins/discovery-azure/licenses/javax.inject-NOTICE.txt b/plugins/discovery-azure-classic/licenses/javax.inject-NOTICE.txt similarity index 100% rename from plugins/discovery-azure/licenses/javax.inject-NOTICE.txt rename to plugins/discovery-azure-classic/licenses/javax.inject-NOTICE.txt diff --git a/plugins/discovery-azure/licenses/jaxb-LICENSE.txt b/plugins/discovery-azure-classic/licenses/jaxb-LICENSE.txt similarity index 100% rename from plugins/discovery-azure/licenses/jaxb-LICENSE.txt rename to plugins/discovery-azure-classic/licenses/jaxb-LICENSE.txt diff --git a/plugins/discovery-azure/licenses/jaxb-NOTICE.txt b/plugins/discovery-azure-classic/licenses/jaxb-NOTICE.txt similarity index 100% rename from plugins/discovery-azure/licenses/jaxb-NOTICE.txt rename to plugins/discovery-azure-classic/licenses/jaxb-NOTICE.txt diff --git a/plugins/discovery-azure/licenses/jaxb-api-2.2.2.jar.sha1 b/plugins/discovery-azure-classic/licenses/jaxb-api-2.2.2.jar.sha1 similarity index 100% rename from 
plugins/discovery-azure/licenses/jaxb-api-2.2.2.jar.sha1 rename to plugins/discovery-azure-classic/licenses/jaxb-api-2.2.2.jar.sha1 diff --git a/plugins/discovery-azure/licenses/jaxb-impl-2.2.3-1.jar.sha1 b/plugins/discovery-azure-classic/licenses/jaxb-impl-2.2.3-1.jar.sha1 similarity index 100% rename from plugins/discovery-azure/licenses/jaxb-impl-2.2.3-1.jar.sha1 rename to plugins/discovery-azure-classic/licenses/jaxb-impl-2.2.3-1.jar.sha1 diff --git a/plugins/discovery-azure/licenses/jersey-LICENSE.txt b/plugins/discovery-azure-classic/licenses/jersey-LICENSE.txt similarity index 100% rename from plugins/discovery-azure/licenses/jersey-LICENSE.txt rename to plugins/discovery-azure-classic/licenses/jersey-LICENSE.txt diff --git a/plugins/discovery-azure/licenses/jersey-NOTICE.txt b/plugins/discovery-azure-classic/licenses/jersey-NOTICE.txt similarity index 100% rename from plugins/discovery-azure/licenses/jersey-NOTICE.txt rename to plugins/discovery-azure-classic/licenses/jersey-NOTICE.txt diff --git a/plugins/discovery-azure/licenses/jersey-client-1.13.jar.sha1 b/plugins/discovery-azure-classic/licenses/jersey-client-1.13.jar.sha1 similarity index 100% rename from plugins/discovery-azure/licenses/jersey-client-1.13.jar.sha1 rename to plugins/discovery-azure-classic/licenses/jersey-client-1.13.jar.sha1 diff --git a/plugins/discovery-azure/licenses/jersey-core-1.13.jar.sha1 b/plugins/discovery-azure-classic/licenses/jersey-core-1.13.jar.sha1 similarity index 100% rename from plugins/discovery-azure/licenses/jersey-core-1.13.jar.sha1 rename to plugins/discovery-azure-classic/licenses/jersey-core-1.13.jar.sha1 diff --git a/plugins/discovery-azure/licenses/jersey-json-1.13.jar.sha1 b/plugins/discovery-azure-classic/licenses/jersey-json-1.13.jar.sha1 similarity index 100% rename from plugins/discovery-azure/licenses/jersey-json-1.13.jar.sha1 rename to plugins/discovery-azure-classic/licenses/jersey-json-1.13.jar.sha1 diff --git a/plugins/discovery-azure/licenses/jettison-1.1.jar.sha1 b/plugins/discovery-azure-classic/licenses/jettison-1.1.jar.sha1 similarity index 100% rename from plugins/discovery-azure/licenses/jettison-1.1.jar.sha1 rename to plugins/discovery-azure-classic/licenses/jettison-1.1.jar.sha1 diff --git a/plugins/discovery-azure/licenses/jettison-LICENSE.txt b/plugins/discovery-azure-classic/licenses/jettison-LICENSE.txt similarity index 100% rename from plugins/discovery-azure/licenses/jettison-LICENSE.txt rename to plugins/discovery-azure-classic/licenses/jettison-LICENSE.txt diff --git a/plugins/discovery-azure/licenses/jettison-NOTICE.txt b/plugins/discovery-azure-classic/licenses/jettison-NOTICE.txt similarity index 100% rename from plugins/discovery-azure/licenses/jettison-NOTICE.txt rename to plugins/discovery-azure-classic/licenses/jettison-NOTICE.txt diff --git a/plugins/discovery-azure/licenses/mail-1.4.5.jar.sha1 b/plugins/discovery-azure-classic/licenses/mail-1.4.5.jar.sha1 similarity index 100% rename from plugins/discovery-azure/licenses/mail-1.4.5.jar.sha1 rename to plugins/discovery-azure-classic/licenses/mail-1.4.5.jar.sha1 diff --git a/plugins/discovery-azure/licenses/mail-LICENSE.txt b/plugins/discovery-azure-classic/licenses/mail-LICENSE.txt similarity index 100% rename from plugins/discovery-azure/licenses/mail-LICENSE.txt rename to plugins/discovery-azure-classic/licenses/mail-LICENSE.txt diff --git a/plugins/discovery-azure/licenses/mail-NOTICE.txt b/plugins/discovery-azure-classic/licenses/mail-NOTICE.txt similarity index 100% rename from 
plugins/discovery-azure/licenses/mail-NOTICE.txt rename to plugins/discovery-azure-classic/licenses/mail-NOTICE.txt diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/AzureDiscoveryModule.java b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/AzureDiscoveryModule.java similarity index 76% rename from plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/AzureDiscoveryModule.java rename to plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/AzureDiscoveryModule.java index 2c9c6e0a486..da684fd824d 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/AzureDiscoveryModule.java +++ b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/AzureDiscoveryModule.java @@ -17,12 +17,11 @@ * under the License. */ -package org.elasticsearch.cloud.azure; +package org.elasticsearch.cloud.azure.classic; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.cloud.azure.management.AzureComputeService; -import org.elasticsearch.cloud.azure.management.AzureComputeService.Management; -import org.elasticsearch.cloud.azure.management.AzureComputeServiceImpl; +import org.elasticsearch.cloud.azure.classic.management.AzureComputeService; +import org.elasticsearch.cloud.azure.classic.management.AzureComputeServiceImpl; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Inject; @@ -31,7 +30,7 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoveryModule; -import org.elasticsearch.plugin.discovery.azure.AzureDiscoveryPlugin; +import org.elasticsearch.plugin.discovery.azure.classic.AzureDiscoveryPlugin; /** * Azure Module @@ -41,7 +40,7 @@ import org.elasticsearch.plugin.discovery.azure.AzureDiscoveryPlugin; * to AzureComputeServiceImpl. * * - * @see org.elasticsearch.cloud.azure.management.AzureComputeServiceImpl + * @see AzureComputeServiceImpl */ public class AzureDiscoveryModule extends AbstractModule { protected final ESLogger logger; @@ -77,19 +76,19 @@ public class AzureDiscoveryModule extends AbstractModule { return false; } - if (isDefined(settings, Management.SUBSCRIPTION_ID_SETTING) && - isDefined(settings, Management.SERVICE_NAME_SETTING) && - isDefined(settings, Management.KEYSTORE_PATH_SETTING) && - isDefined(settings, Management.KEYSTORE_PASSWORD_SETTING)) { + if (isDefined(settings, AzureComputeService.Management.SUBSCRIPTION_ID_SETTING) && + isDefined(settings, AzureComputeService.Management.SERVICE_NAME_SETTING) && + isDefined(settings, AzureComputeService.Management.KEYSTORE_PATH_SETTING) && + isDefined(settings, AzureComputeService.Management.KEYSTORE_PASSWORD_SETTING)) { logger.trace("All required properties for Azure discovery are set!"); return true; } else { logger.debug("One or more Azure discovery settings are missing. " + "Check elasticsearch.yml file. 
Should have [{}], [{}], [{}] and [{}].", - Management.SUBSCRIPTION_ID_SETTING.getKey(), - Management.SERVICE_NAME_SETTING.getKey(), - Management.KEYSTORE_PATH_SETTING.getKey(), - Management.KEYSTORE_PASSWORD_SETTING.getKey()); + AzureComputeService.Management.SUBSCRIPTION_ID_SETTING.getKey(), + AzureComputeService.Management.SERVICE_NAME_SETTING.getKey(), + AzureComputeService.Management.KEYSTORE_PATH_SETTING.getKey(), + AzureComputeService.Management.KEYSTORE_PASSWORD_SETTING.getKey()); return false; } } diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/AzureServiceDisableException.java b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/AzureServiceDisableException.java similarity index 95% rename from plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/AzureServiceDisableException.java rename to plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/AzureServiceDisableException.java index 487997d71b6..66488f90c31 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/AzureServiceDisableException.java +++ b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/AzureServiceDisableException.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.cloud.azure; +package org.elasticsearch.cloud.azure.classic; public class AzureServiceDisableException extends IllegalStateException { public AzureServiceDisableException(String msg) { diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/AzureServiceRemoteException.java b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/AzureServiceRemoteException.java similarity index 95% rename from plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/AzureServiceRemoteException.java rename to plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/AzureServiceRemoteException.java index 4bd4f1d67f1..c961c03ba71 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/AzureServiceRemoteException.java +++ b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/AzureServiceRemoteException.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.cloud.azure; +package org.elasticsearch.cloud.azure.classic; public class AzureServiceRemoteException extends IllegalStateException { public AzureServiceRemoteException(String msg) { diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/management/AzureComputeService.java similarity index 94% rename from plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java rename to plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/management/AzureComputeService.java index 526f98025b7..49e609aad80 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java +++ b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/management/AzureComputeService.java @@ -17,15 +17,15 @@ * under the License. 
*/ -package org.elasticsearch.cloud.azure.management; +package org.elasticsearch.cloud.azure.classic.management; import com.microsoft.windowsazure.core.utils.KeyStoreType; import com.microsoft.windowsazure.management.compute.models.HostedServiceGetDetailedResponse; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.discovery.azure.AzureUnicastHostsProvider; -import org.elasticsearch.discovery.azure.AzureUnicastHostsProvider.Deployment; +import org.elasticsearch.discovery.azure.classic.AzureUnicastHostsProvider; +import org.elasticsearch.discovery.azure.classic.AzureUnicastHostsProvider.Deployment; import java.net.URI; import java.net.URISyntaxException; diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeServiceImpl.java b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/management/AzureComputeServiceImpl.java similarity index 59% rename from plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeServiceImpl.java rename to plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/management/AzureComputeServiceImpl.java index 0764ec99c12..2375db2502b 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeServiceImpl.java +++ b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/management/AzureComputeServiceImpl.java @@ -17,30 +17,32 @@ * under the License. */ -package org.elasticsearch.cloud.azure.management; +package org.elasticsearch.cloud.azure.classic.management; import com.microsoft.windowsazure.Configuration; +import com.microsoft.windowsazure.core.Builder; +import com.microsoft.windowsazure.core.DefaultBuilder; import com.microsoft.windowsazure.core.utils.KeyStoreType; import com.microsoft.windowsazure.management.compute.ComputeManagementClient; import com.microsoft.windowsazure.management.compute.ComputeManagementService; import com.microsoft.windowsazure.management.compute.models.HostedServiceGetDetailedResponse; import com.microsoft.windowsazure.management.configuration.ManagementConfiguration; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.cloud.azure.AzureServiceDisableException; -import org.elasticsearch.cloud.azure.AzureServiceRemoteException; +import org.elasticsearch.cloud.azure.classic.AzureServiceRemoteException; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import java.io.IOException; +import java.util.ServiceLoader; /** * */ -public class AzureComputeServiceImpl extends AbstractLifecycleComponent +public class AzureComputeServiceImpl extends AbstractLifecycleComponent implements AzureComputeService { - private final ComputeManagementClient computeManagementClient; + private final ComputeManagementClient client; private final String serviceName; @Inject @@ -54,28 +56,36 @@ public class AzureComputeServiceImpl extends AbstractLifecycleComponent +public abstract class AzureComputeServiceAbstractMock extends AbstractLifecycleComponent implements AzureComputeService { protected AzureComputeServiceAbstractMock(Settings settings) { diff --git a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureDiscoveryClusterFormationTests.java 
b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java similarity index 98% rename from plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureDiscoveryClusterFormationTests.java rename to plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java index 0d1de07ed64..505f2d8b0db 100644 --- a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureDiscoveryClusterFormationTests.java +++ b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java @@ -17,14 +17,14 @@ * under the License. */ -package org.elasticsearch.discovery.azure; +package org.elasticsearch.discovery.azure.classic; import com.microsoft.windowsazure.management.compute.models.DeploymentSlot; import com.microsoft.windowsazure.management.compute.models.DeploymentStatus; import com.sun.net.httpserver.Headers; import com.sun.net.httpserver.HttpsConfigurator; import com.sun.net.httpserver.HttpsServer; -import org.elasticsearch.cloud.azure.management.AzureComputeService; +import org.elasticsearch.cloud.azure.classic.management.AzureComputeService; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.logging.Loggers; @@ -33,7 +33,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.env.Environment; import org.elasticsearch.node.Node; -import org.elasticsearch.plugin.discovery.azure.AzureDiscoveryPlugin; +import org.elasticsearch.plugin.discovery.azure.classic.AzureDiscoveryPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.transport.TransportSettings; diff --git a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureDiscoveryRestIT.java b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryRestIT.java similarity index 96% rename from plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureDiscoveryRestIT.java rename to plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryRestIT.java index 131f73d1ca9..cb04842cb4a 100644 --- a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureDiscoveryRestIT.java +++ b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryRestIT.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.discovery.azure; +package org.elasticsearch.discovery.azure.classic; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; diff --git a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureMinimumMasterNodesTests.java b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureMinimumMasterNodesTests.java similarity index 84% rename from plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureMinimumMasterNodesTests.java rename to plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureMinimumMasterNodesTests.java index 46c3f8af7c2..72e1f2da791 100644 --- a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureMinimumMasterNodesTests.java +++ b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureMinimumMasterNodesTests.java @@ -17,11 +17,11 @@ * under the License. */ -package org.elasticsearch.discovery.azure; +package org.elasticsearch.discovery.azure.classic; import org.apache.lucene.util.LuceneTestCase.AwaitsFix; -import org.elasticsearch.cloud.azure.AbstractAzureComputeServiceTestCase; -import org.elasticsearch.cloud.azure.AzureComputeServiceTwoNodesMock; +import org.elasticsearch.cloud.azure.classic.AbstractAzureComputeServiceTestCase; +import org.elasticsearch.cloud.azure.classic.AzureComputeServiceTwoNodesMock; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.discovery.zen.ZenDiscovery; @@ -63,20 +63,23 @@ public class AzureMinimumMasterNodesTests extends AbstractAzureComputeServiceTes logger.info("--> start data node / non master node"); internalCluster().startNode(); try { - assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("100ms").execute().actionGet().getState().nodes().getMasterNodeId(), nullValue()); + assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("100ms").get().getState().nodes().getMasterNodeId(), + nullValue()); fail("should not be able to find master"); } catch (MasterNotDiscoveredException e) { // all is well, no master elected } logger.info("--> start another node"); internalCluster().startNode(); - assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("1s").execute().actionGet().getState().nodes().getMasterNodeId(), notNullValue()); + assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("1s").get().getState().nodes().getMasterNodeId(), + notNullValue()); logger.info("--> stop master node"); internalCluster().stopCurrentMasterNode(); try { - assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("1s").execute().actionGet().getState().nodes().getMasterNodeId(), nullValue()); + assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("1s").get().getState().nodes().getMasterNodeId(), + nullValue()); fail("should not be able to find master"); } catch (MasterNotDiscoveredException e) { // all is well, no master elected @@ -84,6 +87,7 @@ public class AzureMinimumMasterNodesTests extends AbstractAzureComputeServiceTes logger.info("--> start another node"); internalCluster().startNode(); - assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("1s").execute().actionGet().getState().nodes().getMasterNodeId(), notNullValue()); + 
assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("1s").get().getState().nodes().getMasterNodeId(), + notNullValue()); } } diff --git a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureSimpleTests.java b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureSimpleTests.java similarity index 82% rename from plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureSimpleTests.java rename to plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureSimpleTests.java index 7a85909a13e..d2234632122 100644 --- a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureSimpleTests.java +++ b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureSimpleTests.java @@ -17,12 +17,12 @@ * under the License. */ -package org.elasticsearch.discovery.azure; +package org.elasticsearch.discovery.azure.classic; -import org.elasticsearch.cloud.azure.AbstractAzureComputeServiceTestCase; -import org.elasticsearch.cloud.azure.AzureComputeServiceSimpleMock; -import org.elasticsearch.cloud.azure.management.AzureComputeService.Discovery; -import org.elasticsearch.cloud.azure.management.AzureComputeService.Management; +import org.elasticsearch.cloud.azure.classic.AbstractAzureComputeServiceTestCase; +import org.elasticsearch.cloud.azure.classic.AzureComputeServiceSimpleMock; +import org.elasticsearch.cloud.azure.classic.management.AzureComputeService.Discovery; +import org.elasticsearch.cloud.azure.classic.management.AzureComputeService.Management; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESIntegTestCase; @@ -38,14 +38,15 @@ public class AzureSimpleTests extends AbstractAzureComputeServiceTestCase { super(AzureComputeServiceSimpleMock.TestPlugin.class); } - public void testOneNodeDhouldRunUsingPrivateIp() { + public void testOneNodeShouldRunUsingPrivateIp() { Settings.Builder settings = Settings.builder() .put(Management.SERVICE_NAME_SETTING.getKey(), "dummy") .put(Discovery.HOST_TYPE_SETTING.getKey(), "private_ip"); logger.info("--> start one node"); internalCluster().startNode(settings); - assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("1s").execute().actionGet().getState().nodes().getMasterNodeId(), notNullValue()); + assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("1s").get().getState().nodes().getMasterNodeId(), + notNullValue()); // We expect having 1 node as part of the cluster, let's test that checkNumberOfNodes(1); @@ -58,7 +59,8 @@ public class AzureSimpleTests extends AbstractAzureComputeServiceTestCase { logger.info("--> start one node"); internalCluster().startNode(settings); - assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("1s").execute().actionGet().getState().nodes().getMasterNodeId(), notNullValue()); + assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("1s").get().getState().nodes().getMasterNodeId(), + notNullValue()); // We expect having 1 node as part of the cluster, let's test that checkNumberOfNodes(1); diff --git a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureTwoStartedNodesTests.java b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureTwoStartedNodesTests.java similarity index 78% rename from 
plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureTwoStartedNodesTests.java rename to plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureTwoStartedNodesTests.java index 6431696c2eb..35844c9b383 100644 --- a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureTwoStartedNodesTests.java +++ b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureTwoStartedNodesTests.java @@ -17,12 +17,12 @@ * under the License. */ -package org.elasticsearch.discovery.azure; +package org.elasticsearch.discovery.azure.classic; -import org.elasticsearch.cloud.azure.AbstractAzureComputeServiceTestCase; -import org.elasticsearch.cloud.azure.AzureComputeServiceTwoNodesMock; -import org.elasticsearch.cloud.azure.management.AzureComputeService.Discovery; -import org.elasticsearch.cloud.azure.management.AzureComputeService.Management; +import org.elasticsearch.cloud.azure.classic.AbstractAzureComputeServiceTestCase; +import org.elasticsearch.cloud.azure.classic.AzureComputeServiceTwoNodesMock; +import org.elasticsearch.cloud.azure.classic.management.AzureComputeService.Discovery; +import org.elasticsearch.cloud.azure.classic.management.AzureComputeService.Management; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESIntegTestCase; @@ -46,11 +46,13 @@ public class AzureTwoStartedNodesTests extends AbstractAzureComputeServiceTestCa logger.info("--> start first node"); internalCluster().startNode(settings); - assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("1s").execute().actionGet().getState().nodes().getMasterNodeId(), notNullValue()); + assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("1s").get().getState().nodes().getMasterNodeId(), + notNullValue()); logger.info("--> start another node"); internalCluster().startNode(settings); - assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("1s").execute().actionGet().getState().nodes().getMasterNodeId(), notNullValue()); + assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("1s").get().getState().nodes().getMasterNodeId(), + notNullValue()); // We expect having 2 nodes as part of the cluster, let's test that checkNumberOfNodes(2); @@ -64,11 +66,13 @@ public class AzureTwoStartedNodesTests extends AbstractAzureComputeServiceTestCa logger.info("--> start first node"); internalCluster().startNode(settings); - assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("1s").execute().actionGet().getState().nodes().getMasterNodeId(), notNullValue()); + assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("1s").get().getState().nodes().getMasterNodeId(), + notNullValue()); logger.info("--> start another node"); internalCluster().startNode(settings); - assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("1s").execute().actionGet().getState().nodes().getMasterNodeId(), notNullValue()); + assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("1s").get().getState().nodes().getMasterNodeId(), + notNullValue()); // We expect having 2 nodes as part of the cluster, let's test that checkNumberOfNodes(2); diff --git a/plugins/discovery-azure-classic/src/test/resources/rest-api-spec/test/discovery_azure_classic/10_basic.yaml b/plugins/discovery-azure-classic/src/test/resources/rest-api-spec/test/discovery_azure_classic/10_basic.yaml new file mode 100644 index 
00000000000..ea042d8a52d --- /dev/null +++ b/plugins/discovery-azure-classic/src/test/resources/rest-api-spec/test/discovery_azure_classic/10_basic.yaml @@ -0,0 +1,13 @@ +# Integration tests for Azure Classic Discovery component +# +"Discovery Azure Classic loaded": + - do: + cluster.state: {} + + # Get master node id + - set: { master_node: master } + + - do: + nodes.info: {} + + - match: { nodes.$master.plugins.0.name: discovery-azure-classic } diff --git a/plugins/discovery-azure/src/test/resources/rest-api-spec/test/discovery_azure/10_basic.yaml b/plugins/discovery-azure/src/test/resources/rest-api-spec/test/discovery_azure/10_basic.yaml deleted file mode 100644 index 7a5acd1f001..00000000000 --- a/plugins/discovery-azure/src/test/resources/rest-api-spec/test/discovery_azure/10_basic.yaml +++ /dev/null @@ -1,13 +0,0 @@ -# Integration tests for Azure Discovery component -# -"Discovery Azure loaded": - - do: - cluster.state: {} - - # Get master node id - - set: { master_node: master } - - - do: - nodes.info: {} - - - match: { nodes.$master.plugins.0.name: discovery-azure } diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java index 2aec30d6ddb..2fc82a01f09 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java @@ -49,7 +49,7 @@ import java.util.Random; /** * */ -public class AwsEc2ServiceImpl extends AbstractLifecycleComponent<AwsEc2Service> implements AwsEc2Service { +public class AwsEc2ServiceImpl extends AbstractLifecycleComponent implements AwsEc2Service { public static final String EC2_METADATA_URL = "http://169.254.169.254/latest/meta-data/"; @@ -148,6 +148,8 @@ public class AwsEc2ServiceImpl extends AbstractLifecycleComponent<AwsEc2Service> endpoint = "ec2.ap-southeast-1.amazonaws.com"; } else if (region.equals("us-gov-west") || region.equals("us-gov-west-1")) { endpoint = "ec2.us-gov-west-1.amazonaws.com"; + } else if (region.equals("ap-south-1")) { + endpoint = "ec2.ap-south-1.amazonaws.com"; } else if (region.equals("ap-southeast-2")) { endpoint = "ec2.ap-southeast-2.amazonaws.com"; } else if (region.equals("ap-northeast") || region.equals("ap-northeast-1")) { diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java index f14a80f01cc..19a5c8d3b96 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.settings.Settings; import java.util.List; -public class AwsEc2ServiceMock extends AbstractLifecycleComponent<AwsEc2Service> implements AwsEc2Service { +public class AwsEc2ServiceMock extends AbstractLifecycleComponent implements AwsEc2Service { private int nodes; private List<List<Tag>> tagsList;
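The two lines added to AwsEc2ServiceImpl above extend the region-to-endpoint chain to the new Mumbai region. A minimal elasticsearch.yml sketch of the configuration that would exercise that branch, assuming the plugin's usual discovery.type and cloud.aws.region settings from this era of the plugin rather than anything introduced in this change:

    discovery.type: ec2
    # "ap-south-1" now resolves to ec2.ap-south-1.amazonaws.com via the branch added above
    cloud.aws.region: ap-south-1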
diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java index a6faa390e5d..00a44cd56af 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeService.java @@ -31,7 +31,7 @@ import java.util.Collections; import java.util.List; import java.util.function.Function; -public interface GceComputeService extends LifecycleComponent<GceComputeService> { +public interface GceComputeService extends LifecycleComponent { /** * GCE API Version: Elasticsearch/GceCloud/1.0 diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java index 85e0910736f..8d1090dd82c 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceComputeServiceImpl.java @@ -56,7 +56,7 @@ import java.util.Collections; import java.util.List; import java.util.function.Function; -public class GceComputeServiceImpl extends AbstractLifecycleComponent<GceComputeService> +public class GceComputeServiceImpl extends AbstractLifecycleComponent implements GceComputeService { // all settings just used for testing - not registered by default @@ -97,7 +97,8 @@ public class GceComputeServiceImpl extends AbstractLifecycleComponent<GceComputeService> - return instanceList.isEmpty() ? Collections.<Instance>emptyList() : instanceList.getItems(); + return instanceList.isEmpty() || instanceList.getItems() == null ? + Collections.<Instance>emptyList() : instanceList.getItems(); } catch (PrivilegedActionException e) { logger.warn("Problem fetching instance list for zone {}", e, zoneId); logger.debug("Full exception:", e); diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java index c9dd2263245..be3d737b919 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java @@ -249,8 +249,8 @@ public class GceUnicastHostsProvider extends AbstractComponent implements Unicas } } - } catch (Throwable e) { - logger.warn("Exception caught during discovery: {}", e, e.getMessage()); + } catch (Exception e) { + logger.warn("exception caught during discovery", e); } logger.debug("{} node(s) added", cachedDiscoNodes.size()); diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java index 07b2ef774b4..92eb12a99b2 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java @@ -254,4 +254,19 @@ public class GceDiscoveryTests extends ESTestCase { assertThat(expected.getMessage(), containsString("one or more gce discovery settings are missing.")); } } + + /** + * For issue https://github.com/elastic/elasticsearch/issues/16967: + * When using multiple regions and one of them has no instance at all, this + * was producing an NPE as a result.
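+ * (A zone with no instances yields a compute API response that carries only + * an id and omits the "items" array entirely, like the us-central1-a test + * resource below, which is just {"id": "dummy"}; InstanceList.getItems() + * then returns null rather than an empty list, hence the null guard added + * in GceComputeServiceImpl above.)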
+ */ + public void testNoRegionReturnsEmptyList() { + Settings nodeSettings = Settings.builder() + .put(GceComputeService.PROJECT_SETTING.getKey(), projectName) + .putArray(GceComputeService.ZONE_SETTING.getKey(), "europe-west1-b", "us-central1-a") + .build(); + mock = new GceComputeServiceMock(nodeSettings, networkService); + List discoveryNodes = buildDynamicNodes(mock, nodeSettings); + assertThat(discoveryNodes, hasSize(1)); + } } diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapperTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapperTests.java index aa6e91fabdf..a49124749fe 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapperTests.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapperTests.java @@ -44,7 +44,7 @@ import static org.hamcrest.Matchers.lessThan; public class RetryHttpInitializerWrapperTests extends ESTestCase { - static private class FailThenSuccessBackoffTransport extends MockHttpTransport { + private static class FailThenSuccessBackoffTransport extends MockHttpTransport { public int lowLevelExecCalls; int errorStatusCode; diff --git a/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/noregionreturnsemptylist/zones/europe-west1-b/instances b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/noregionreturnsemptylist/zones/europe-west1-b/instances new file mode 100644 index 00000000000..049e0e1e1b1 --- /dev/null +++ b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/noregionreturnsemptylist/zones/europe-west1-b/instances @@ -0,0 +1,36 @@ +{ + "id": "dummy", + "items":[ + { + "description": "ES Node 1", + "id": "9309873766428965105", + "kind": "compute#instance", + "machineType": "n1-standard-1", + "name": "test1", + "networkInterfaces": [ + { + "accessConfigs": [ + { + "kind": "compute#accessConfig", + "name": "External NAT", + "natIP": "104.155.13.147", + "type": "ONE_TO_ONE_NAT" + } + ], + "name": "nic0", + "network": "default", + "networkIP": "10.240.79.59" + } + ], + "status": "RUNNING", + "tags": { + "fingerprint": "xA6QJb-rGtg=", + "items": [ + "elasticsearch", + "dev" + ] + }, + "zone": "europe-west1-b" + } + ] +} diff --git a/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/noregionreturnsemptylist/zones/us-central1-a/instances b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/noregionreturnsemptylist/zones/us-central1-a/instances new file mode 100644 index 00000000000..989b7507fe8 --- /dev/null +++ b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/noregionreturnsemptylist/zones/us-central1-a/instances @@ -0,0 +1,3 @@ +{ + "id": "dummy" +} diff --git a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/AttachmentProcessor.java b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/AttachmentProcessor.java index 40ebe2592ab..b06232e1c41 100644 --- a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/AttachmentProcessor.java +++ b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/AttachmentProcessor.java @@ -25,8 +25,8 @@ import org.apache.tika.metadata.TikaCoreProperties; import 
org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Strings; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import java.io.IOException; import java.util.Arrays; @@ -122,7 +122,7 @@ public final class AttachmentProcessor extends AbstractProcessor { String length = Strings.hasLength(contentLength) ? contentLength : String.valueOf(parsedContent.length()); additionalFields.put(Property.CONTENT_LENGTH.toLowerCase(), length); } - } catch (Throwable e) { + } catch (Exception e) { throw new ElasticsearchParseException("Error parsing document in field [{}]", e, field); } @@ -150,12 +150,12 @@ public final class AttachmentProcessor extends AbstractProcessor { return indexedChars; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { static final Set DEFAULT_PROPERTIES = EnumSet.allOf(Property.class); @Override - public AttachmentProcessor doCreate(String processorTag, Map config) throws Exception { + public AttachmentProcessor create(String processorTag, Map config) throws Exception { String field = readStringProperty(TYPE, processorTag, config, "field"); String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "attachment"); List properyNames = readOptionalList(TYPE, processorTag, config, "properties"); diff --git a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorFactoryTests.java b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorFactoryTests.java index 6bd4e07702e..8d011056854 100644 --- a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorFactoryTests.java +++ b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorFactoryTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.ingest.attachment; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; +import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; @@ -46,9 +46,8 @@ public class AttachmentProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - AttachmentProcessor processor = factory.create(config); + AttachmentProcessor processor = factory.create(processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("attachment")); @@ -62,8 +61,7 @@ public class AttachmentProcessorFactoryTests extends ESTestCase { config.put("indexed_chars", indexedChars); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - AttachmentProcessor processor = factory.create(config); + AttachmentProcessor processor = factory.create(processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getIndexedChars(), is(indexedChars)); } @@ -72,7 +70,7 @@ public class AttachmentProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "_field"); config.put("target_field", "_field"); - AttachmentProcessor processor = 
factory.create(config); + AttachmentProcessor processor = factory.create(null, config); assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("_field")); } @@ -89,7 +87,7 @@ public class AttachmentProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "_field"); config.put("properties", fieldNames); - AttachmentProcessor processor = factory.create(config); + AttachmentProcessor processor = factory.create(null, config); assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getProperties(), equalTo(properties)); } @@ -99,7 +97,7 @@ public class AttachmentProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("properties", Collections.singletonList("invalid")); try { - factory.create(config); + factory.create(null, config); fail("exception expected"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), containsString("[properties] illegal field option [invalid]")); @@ -113,7 +111,7 @@ public class AttachmentProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("properties", "invalid"); try { - factory.create(config); + factory.create(null, config); fail("exception expected"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[properties] property isn't a list, but of type [java.lang.String]")); diff --git a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/TikaDocTests.java b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/TikaDocTests.java index 0c63f65c247..4b9a40dd8a9 100644 --- a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/TikaDocTests.java +++ b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/TikaDocTests.java @@ -58,7 +58,7 @@ public class TikaDocTests extends ESTestCase { assertNotNull(parsedContent); assertFalse(parsedContent.isEmpty()); logger.debug("extracted content: {}", parsedContent); - } catch (Throwable e) { + } catch (Exception e) { throw new RuntimeException("parsing of filename: " + fileName.getFileName() + " failed", e); } } diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index c29bf4aa65d..64f0994b7d9 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -34,8 +34,8 @@ import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import java.io.Closeable; import java.io.IOException; @@ -217,7 +217,7 @@ public final class GeoIpProcessor extends AbstractProcessor { return geoData; } - public static final class Factory extends AbstractProcessorFactory implements Closeable { + public static final class Factory implements Processor.Factory { static final Set DEFAULT_CITY_PROPERTIES = EnumSet.of( Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE, Property.REGION_NAME, Property.CITY_NAME, Property.LOCATION @@ -231,7 +231,7 @@ public final class GeoIpProcessor extends AbstractProcessor { } 
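// With Processor.Factory the tag now arrives as an explicit create() argument;
// AbstractProcessorFactory previously pulled it out of the config map under its
// TAG_KEY constant before delegating to doCreate(tag, config). That is why the
// factory tests in this commit pass processorTag (or null) directly to create()
// instead of putting it into the config map first.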
@Override - public GeoIpProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception { + public GeoIpProcessor create(String processorTag, Map<String, Object> config) throws Exception { String ipField = readStringProperty(TYPE, processorTag, config, "field"); String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "geoip"); String databaseFile = readStringProperty(TYPE, processorTag, config, "database_file", "GeoLite2-City.mmdb.gz"); @@ -267,17 +267,12 @@ public final class GeoIpProcessor extends AbstractProcessor { return new GeoIpProcessor(processorTag, ipField, databaseReader, targetField, properties); } - - @Override - public void close() throws IOException { - IOUtils.close(databaseReaders.values()); - } } // Geoip2's AddressNotFoundException is checked and, because we need to run their code // inside a PrivilegedAction code block, we are forced to catch any checked exception and rethrow // it with an unchecked exception. - private final static class AddressNotFoundRuntimeException extends RuntimeException { + private static final class AddressNotFoundRuntimeException extends RuntimeException { public AddressNotFoundRuntimeException(Throwable cause) { super(cause); diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index 8339ff1fc60..d814ae46bea 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -20,9 +20,11 @@ package org.elasticsearch.ingest.geoip; import com.maxmind.geoip2.DatabaseReader; +import org.apache.lucene.util.IOUtils; import org.elasticsearch.node.NodeModule; import org.elasticsearch.plugins.Plugin; +import java.io.Closeable; import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; @@ -36,11 +38,16 @@ import java.util.Map; import java.util.stream.Stream; import java.util.zip.GZIPInputStream; -public class IngestGeoIpPlugin extends Plugin { +public class IngestGeoIpPlugin extends Plugin implements Closeable { + + private Map<String, DatabaseReader> databaseReaders; public void onModule(NodeModule nodeModule) throws IOException { + if (databaseReaders != null) { + throw new IllegalStateException("called onModule twice for geoip plugin!!"); + } Path geoIpConfigDirectory = nodeModule.getNode().getEnvironment().configFile().resolve("ingest-geoip"); - Map<String, DatabaseReader> databaseReaders = loadDatabaseReaders(geoIpConfigDirectory); + databaseReaders = loadDatabaseReaders(geoIpConfigDirectory); nodeModule.registerProcessor(GeoIpProcessor.TYPE, (registry) -> new GeoIpProcessor.Factory(databaseReaders)); } @@ -65,4 +72,11 @@ public class IngestGeoIpPlugin extends Plugin { } return Collections.unmodifiableMap(databaseReaders); } + + @Override + public void close() throws IOException { + if (databaseReaders != null) { + IOUtils.close(databaseReaders.values()); + } + } } diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 04729162729..28043171fcd 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -23,7 +23,6 @@ import
com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.maxmind.geoip2.DatabaseReader; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Randomness; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.StreamsUtils; import org.junit.AfterClass; @@ -74,11 +73,9 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { Map<String, Object> config = new HashMap<>(); config.put("field", "_field"); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - GeoIpProcessor processor = factory.create(config); + GeoIpProcessor processor = factory.create(processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); @@ -92,11 +89,9 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { Map<String, Object> config = new HashMap<>(); config.put("field", "_field"); config.put("database_file", "GeoLite2-Country.mmdb.gz"); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - GeoIpProcessor processor = factory.create(config); + GeoIpProcessor processor = factory.create(processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); @@ -109,7 +104,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { Map<String, Object> config = new HashMap<>(); config.put("field", "_field"); config.put("target_field", "_field"); - GeoIpProcessor processor = factory.create(config); + GeoIpProcessor processor = factory.create(null, config); assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("_field")); } @@ -119,7 +114,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { Map<String, Object> config = new HashMap<>(); config.put("field", "_field"); config.put("database_file", "GeoLite2-Country.mmdb.gz"); - GeoIpProcessor processor = factory.create(config); + GeoIpProcessor processor = factory.create(null, config); assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-Country")); @@ -135,7 +130,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { String cityProperty = RandomPicks.randomFrom(Randomness.get(), cityOnlyProperties).toString(); config.put("properties", Collections.singletonList(cityProperty)); try { - factory.create(config); + factory.create(null, config); fail("Exception expected"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[properties] illegal property value [" + cityProperty + @@ -150,7 +145,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("database_file", "does-not-exist.mmdb.gz"); try { - factory.create(config); + factory.create(null, config); fail("Exception expected"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[database_file] database file [does-not-exist.mmdb.gz] doesn't exist")); @@ -171,7 +166,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { Map<String, Object> config = new HashMap<>(); config.put("field", "_field"); config.put("properties", fieldNames); - GeoIpProcessor processor = factory.create(config); +
GeoIpProcessor processor = factory.create(null, config); assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getProperties(), equalTo(properties)); } @@ -183,7 +178,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("properties", Collections.singletonList("invalid")); try { - factory.create(config); + factory.create(null, config); fail("exception expected"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[properties] illegal property value [invalid]. valid values are [IP, COUNTRY_ISO_CODE, " + @@ -194,7 +189,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("properties", "invalid"); try { - factory.create(config); + factory.create("tag", config); fail("exception expected"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[properties] property isn't a list, but of type [java.lang.String]")); diff --git a/core/src/test/java/org/elasticsearch/common/compress/deflate/DeflateXContentTests.java b/plugins/ingest-useragent/build.gradle similarity index 72% rename from core/src/test/java/org/elasticsearch/common/compress/deflate/DeflateXContentTests.java rename to plugins/ingest-useragent/build.gradle index 359a582e169..86fbf243497 100644 --- a/core/src/test/java/org/elasticsearch/common/compress/deflate/DeflateXContentTests.java +++ b/plugins/ingest-useragent/build.gradle @@ -16,15 +16,14 @@ * specific language governing permissions and limitations * under the License. */ - -package org.elasticsearch.common.compress.deflate; - -import org.elasticsearch.common.compress.AbstractCompressedXContentTestCase; - -public class DeflateXContentTests extends AbstractCompressedXContentTestCase { - - public DeflateXContentTests() { - super(new DeflateCompressor()); - } - + +esplugin { + description 'Ingest processor that extracts information from a user agent' + classname 'org.elasticsearch.ingest.useragent.IngestUserAgentPlugin' } + +integTest { + cluster { + extraConfigFile 'ingest-useragent/test-regexes.yaml', 'test/test-regexes.yaml' + } +} \ No newline at end of file diff --git a/plugins/ingest-useragent/src/main/java/org/elasticsearch/ingest/useragent/IngestUserAgentPlugin.java b/plugins/ingest-useragent/src/main/java/org/elasticsearch/ingest/useragent/IngestUserAgentPlugin.java new file mode 100644 index 00000000000..e86a8443d7e --- /dev/null +++ b/plugins/ingest-useragent/src/main/java/org/elasticsearch/ingest/useragent/IngestUserAgentPlugin.java @@ -0,0 +1,86 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.useragent; + +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.node.NodeModule; +import org.elasticsearch.plugins.Plugin; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.PathMatcher; +import java.nio.file.StandardOpenOption; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Stream; + +public class IngestUserAgentPlugin extends Plugin { + + private final Setting<Long> CACHE_SIZE_SETTING = Setting.longSetting("ingest.useragent.cache_size", 1000, 0, + Setting.Property.NodeScope); + + static final String DEFAULT_PARSER_NAME = "_default_"; + + public void onModule(NodeModule nodeModule) throws IOException { + Path userAgentConfigDirectory = nodeModule.getNode().getEnvironment().configFile().resolve("ingest-useragent"); + + if (Files.exists(userAgentConfigDirectory) == false || Files.isDirectory(userAgentConfigDirectory) == false) { + throw new IllegalStateException( + "the user agent directory [" + userAgentConfigDirectory + "] containing the regex file doesn't exist"); + } + + long cacheSize = CACHE_SIZE_SETTING.get(nodeModule.getNode().settings()); + + UserAgentCache cache = new UserAgentCache(cacheSize); + + Map<String, UserAgentParser> userAgentParsers = createUserAgentParsers(userAgentConfigDirectory, cache); + + nodeModule.registerProcessor(UserAgentProcessor.TYPE, (registry) -> new UserAgentProcessor.Factory(userAgentParsers)); + } + + static Map<String, UserAgentParser> createUserAgentParsers(Path userAgentConfigDirectory, UserAgentCache cache) throws IOException { + Map<String, UserAgentParser> userAgentParsers = new HashMap<>(); + + UserAgentParser defaultParser = new UserAgentParser(DEFAULT_PARSER_NAME, + IngestUserAgentPlugin.class.getResourceAsStream("/regexes.yaml"), cache); + userAgentParsers.put(DEFAULT_PARSER_NAME, defaultParser); + + if (Files.exists(userAgentConfigDirectory) && Files.isDirectory(userAgentConfigDirectory)) { + PathMatcher pathMatcher = userAgentConfigDirectory.getFileSystem().getPathMatcher("glob:**.yaml"); + + try (Stream<Path> regexFiles = Files.find(userAgentConfigDirectory, 1, + (path, attr) -> attr.isRegularFile() && pathMatcher.matches(path))) { + Iterable<Path> iterable = regexFiles::iterator; + for (Path path : iterable) { + String parserName = path.getFileName().toString(); + try (InputStream regexStream = Files.newInputStream(path, StandardOpenOption.READ)) { + userAgentParsers.put(parserName, new UserAgentParser(parserName, regexStream, cache)); + } + } + } + } + + return Collections.unmodifiableMap(userAgentParsers); + } + +} diff --git a/plugins/ingest-useragent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentCache.java b/plugins/ingest-useragent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentCache.java new file mode 100644 index 00000000000..d1002f2df06 --- /dev/null +++ b/plugins/ingest-useragent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentCache.java @@ -0,0 +1,66 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.useragent; + +import org.elasticsearch.common.cache.Cache; +import org.elasticsearch.common.cache.CacheBuilder; +import org.elasticsearch.ingest.useragent.UserAgentParser.Details; + +import java.util.Objects; + +class UserAgentCache { + private final Cache<CompositeCacheKey, Details> cache; + + UserAgentCache(long cacheSize) { + cache = CacheBuilder.<CompositeCacheKey, Details>builder().setMaximumWeight(cacheSize).build(); + } + + public Details get(String parserName, String userAgent) { + return cache.get(new CompositeCacheKey(parserName, userAgent)); + } + + public void put(String parserName, String userAgent, Details details) { + cache.put(new CompositeCacheKey(parserName, userAgent), details); + } + + private static final class CompositeCacheKey { + private final String parserName; + private final String userAgent; + + CompositeCacheKey(String parserName, String userAgent) { + this.parserName = parserName; + this.userAgent = userAgent; + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof CompositeCacheKey) { + CompositeCacheKey s = (CompositeCacheKey)obj; + return parserName.equals(s.parserName) && userAgent.equals(s.userAgent); + } + return false; + } + + @Override + public int hashCode() { + return Objects.hash(parserName, userAgent); + } + } +} diff --git a/plugins/ingest-useragent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentParser.java b/plugins/ingest-useragent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentParser.java new file mode 100644 index 00000000000..af764d5baf2 --- /dev/null +++ b/plugins/ingest-useragent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentParser.java @@ -0,0 +1,280 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.ingest.useragent; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +final class UserAgentParser { + + private final UserAgentCache cache; + private final List<UserAgentSubpattern> uaPatterns = new ArrayList<>(); + private final List<UserAgentSubpattern> osPatterns = new ArrayList<>(); + private final List<UserAgentSubpattern> devicePatterns = new ArrayList<>(); + private final String name; + + public UserAgentParser(String name, InputStream regexStream, UserAgentCache cache) { + this.name = name; + this.cache = cache; + + try { + init(regexStream); + } catch (IOException e) { + throw new ElasticsearchParseException("error parsing regular expression file", e); + } + } + + private void init(InputStream regexStream) throws IOException { + XContentParser yamlParser = XContentFactory.xContent(XContentType.YAML).createParser(regexStream); + + XContentParser.Token token = yamlParser.nextToken(); + + if (token == XContentParser.Token.START_OBJECT) { + token = yamlParser.nextToken(); + + for (; token != null; token = yamlParser.nextToken()) { + if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("user_agent_parsers")) { + List<Map<String, String>> parserConfigurations = readParserConfigurations(yamlParser); + + for (Map<String, String> map : parserConfigurations) { + uaPatterns.add(new UserAgentSubpattern(compilePattern(map.get("regex"), map.get("regex_flag")), + map.get("family_replacement"), map.get("v1_replacement"), map.get("v2_replacement"), + map.get("v3_replacement"), map.get("v4_replacement"))); + } + } + else if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("os_parsers")) { + List<Map<String, String>> parserConfigurations = readParserConfigurations(yamlParser); + + for (Map<String, String> map : parserConfigurations) { + osPatterns.add(new UserAgentSubpattern(compilePattern(map.get("regex"), map.get("regex_flag")), + map.get("os_replacement"), map.get("os_v1_replacement"), map.get("os_v2_replacement"), + map.get("os_v3_replacement"), map.get("os_v4_replacement"))); + } + } + else if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("device_parsers")) { + List<Map<String, String>> parserConfigurations = readParserConfigurations(yamlParser); + + for (Map<String, String> map : parserConfigurations) { + devicePatterns.add(new UserAgentSubpattern(compilePattern(map.get("regex"), map.get("regex_flag")), + map.get("device_replacement"), null, null, null, null)); + } + } + } + } + + if (uaPatterns.isEmpty() && osPatterns.isEmpty() && devicePatterns.isEmpty()) { + throw new ElasticsearchParseException("not a valid regular expression file"); + } + } + + private Pattern compilePattern(String regex, String regex_flag) { + // Only flag present in the current default regexes.yaml + if (regex_flag != null && regex_flag.equals("i")) { + return Pattern.compile(regex, Pattern.CASE_INSENSITIVE); + } else { + return Pattern.compile(regex); + } + } + + private List<Map<String, String>> readParserConfigurations(XContentParser yamlParser) throws IOException { + List<Map<String, String>> patternList = new ArrayList<>(); + + XContentParser.Token token = yamlParser.nextToken(); + if (token != XContentParser.Token.START_ARRAY) { + throw new ElasticsearchParseException("malformed regular expression
file, should continue with 'array' after 'object'"); + } + + token = yamlParser.nextToken(); + if (token != XContentParser.Token.START_OBJECT) { + throw new ElasticsearchParseException("malformed regular expression file, expecting 'object'"); + } + + while (token == XContentParser.Token.START_OBJECT) { + token = yamlParser.nextToken(); + + if (token != XContentParser.Token.FIELD_NAME) { + throw new ElasticsearchParseException("malformed regular expression file, should continue with 'field_name' after 'array'"); + } + + Map<String, String> regexMap = new HashMap<>(); + for (; token == XContentParser.Token.FIELD_NAME; token = yamlParser.nextToken()) { + String fieldName = yamlParser.currentName(); + + token = yamlParser.nextToken(); + String fieldValue = yamlParser.text(); + regexMap.put(fieldName, fieldValue); + } + + patternList.add(regexMap); + + token = yamlParser.nextToken(); + } + + return patternList; + } + + List<UserAgentSubpattern> getUaPatterns() { + return uaPatterns; + } + + List<UserAgentSubpattern> getOsPatterns() { + return osPatterns; + } + + List<UserAgentSubpattern> getDevicePatterns() { + return devicePatterns; + } + + String getName() { + return name; + } + + public Details parse(String agentString) { + Details details = cache.get(name, agentString); + + if (details == null) { + VersionedName userAgent = findMatch(uaPatterns, agentString); + VersionedName operatingSystem = findMatch(osPatterns, agentString); + VersionedName device = findMatch(devicePatterns, agentString); + + details = new Details(userAgent, operatingSystem, device); + + cache.put(name, agentString, details); + } + + return details; + } + + private VersionedName findMatch(List<UserAgentSubpattern> possiblePatterns, String agentString) { + VersionedName name; + for (UserAgentSubpattern pattern : possiblePatterns) { + name = pattern.match(agentString); + + if (name != null) { + return name; + } + } + + return null; + } + + static final class Details { + public final VersionedName userAgent; + public final VersionedName operatingSystem; + public final VersionedName device; + + public Details(VersionedName userAgent, VersionedName operatingSystem, VersionedName device) { + this.userAgent = userAgent; + this.operatingSystem = operatingSystem; + this.device = device; + } + } + + static final class VersionedName { + public final String name; + public final String major; + public final String minor; + public final String patch; + public final String build; + + public VersionedName(String name, String major, String minor, String patch, String build) { + this.name = name; + this.major = major; + this.minor = minor; + this.patch = patch; + this.build = build; + } + } + + /** + * One of: user agent, operating system, device + */ + static final class UserAgentSubpattern { + private final Pattern pattern; + private final String nameReplacement, v1Replacement, v2Replacement, v3Replacement, v4Replacement; + + public UserAgentSubpattern(Pattern pattern, String nameReplacement, + String v1Replacement, String v2Replacement, String v3Replacement, String v4Replacement) { + this.pattern = pattern; + this.nameReplacement = nameReplacement; + this.v1Replacement = v1Replacement; + this.v2Replacement = v2Replacement; + this.v3Replacement = v3Replacement; + this.v4Replacement = v4Replacement; + } + + public VersionedName match(String agentString) { + String name = null, major = null, minor = null, patch = null, build = null; + Matcher matcher = pattern.matcher(agentString); + + if (!matcher.find()) { + return null; + } + + int groupCount = matcher.groupCount(); + + if (nameReplacement != null) { + if
(nameReplacement.contains("$1") && groupCount >= 1 && matcher.group(1) != null) { + name = nameReplacement.replaceFirst("\\$1", Matcher.quoteReplacement(matcher.group(1))); + } else { + name = nameReplacement; + } + } else if (groupCount >= 1) { + name = matcher.group(1); + } + + if (v1Replacement != null) { + major = v1Replacement; + } else if (groupCount >= 2) { + major = matcher.group(2); + } + + if (v2Replacement != null) { + minor = v2Replacement; + } else if (groupCount >= 3) { + minor = matcher.group(3); + } + + if (v3Replacement != null) { + patch = v3Replacement; + } else if (groupCount >= 4) { + patch = matcher.group(4); + } + + if (v4Replacement != null) { + build = v4Replacement; + } else if (groupCount >= 5) { + build = matcher.group(5); + } + + return name == null ? null : new VersionedName(name, major, minor, patch, build); + } + } +} diff --git a/plugins/ingest-useragent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentProcessor.java b/plugins/ingest-useragent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentProcessor.java new file mode 100644 index 00000000000..7cb829e36e2 --- /dev/null +++ b/plugins/ingest-useragent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentProcessor.java @@ -0,0 +1,241 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.useragent; + +import org.elasticsearch.ingest.AbstractProcessor; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.useragent.UserAgentParser.Details; +import org.elasticsearch.ingest.useragent.UserAgentParser.VersionedName; + +import java.util.Arrays; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException; +import static org.elasticsearch.ingest.ConfigurationUtils.readOptionalList; +import static org.elasticsearch.ingest.ConfigurationUtils.readStringProperty; + +public class UserAgentProcessor extends AbstractProcessor { + + public static final String TYPE = "useragent"; + + private final String field; + private final String targetField; + private final Set<Property> properties; + + private final UserAgentParser parser; + + public UserAgentProcessor(String tag, String field, String targetField, UserAgentParser parser, Set<Property> properties) { + super(tag); + this.field = field; + this.targetField = targetField; + this.parser = parser; + this.properties = properties; + } + + @Override + public void execute(IngestDocument ingestDocument) throws Exception { + String userAgent = ingestDocument.getFieldValue(field, String.class); + + Details uaClient = parser.parse(userAgent); + + Map<String, Object> uaDetails = new HashMap<>(); + for (Property property : this.properties) { + switch (property) { + case NAME: + if (uaClient.userAgent != null && uaClient.userAgent.name != null) { + uaDetails.put("name", uaClient.userAgent.name); + } + else { + uaDetails.put("name", "Other"); + } + break; + case MAJOR: + if (uaClient.userAgent != null && uaClient.userAgent.major != null) { + uaDetails.put("major", uaClient.userAgent.major); + } + break; + case MINOR: + if (uaClient.userAgent != null && uaClient.userAgent.minor != null) { + uaDetails.put("minor", uaClient.userAgent.minor); + } + break; + case PATCH: + if (uaClient.userAgent != null && uaClient.userAgent.patch != null) { + uaDetails.put("patch", uaClient.userAgent.patch); + } + break; + case BUILD: + if (uaClient.userAgent != null && uaClient.userAgent.build != null) { + uaDetails.put("build", uaClient.userAgent.build); + } + break; + case OS: + if (uaClient.operatingSystem != null) { + uaDetails.put("os", buildFullOSName(uaClient.operatingSystem)); + } + else { + uaDetails.put("os", "Other"); + } + + break; + case OS_NAME: + if (uaClient.operatingSystem != null && uaClient.operatingSystem.name != null) { + uaDetails.put("os_name", uaClient.operatingSystem.name); + } + else { + uaDetails.put("os_name", "Other"); + } + break; + case OS_MAJOR: + if (uaClient.operatingSystem != null && uaClient.operatingSystem.major != null) { + uaDetails.put("os_major", uaClient.operatingSystem.major); + } + break; + case OS_MINOR: + if (uaClient.operatingSystem != null && uaClient.operatingSystem.minor != null) { + uaDetails.put("os_minor", uaClient.operatingSystem.minor); + } + break; + case DEVICE: + if (uaClient.device != null && uaClient.device.name != null) { + uaDetails.put("device", uaClient.device.name); + } + else { + uaDetails.put("device", "Other"); + } + break; + } + } + + ingestDocument.setFieldValue(targetField, uaDetails); + } + + /** To maintain compatibility with logstash-filter-useragent */ + private String buildFullOSName(VersionedName operatingSystem) { + if (operatingSystem == null ||
operatingSystem.name == null) { + return null; + } + + StringBuilder sb = new StringBuilder(operatingSystem.name); + + if (operatingSystem.major != null) { + sb.append(" "); + sb.append(operatingSystem.major); + + if (operatingSystem.minor != null) { + sb.append("."); + sb.append(operatingSystem.minor); + + if (operatingSystem.patch != null) { + sb.append("."); + sb.append(operatingSystem.patch); + + if (operatingSystem.build != null) { + sb.append("."); + sb.append(operatingSystem.build); + } + } + } + } + + return sb.toString(); + } + + @Override + public String getType() { + return TYPE; + } + + String getField() { + return field; + } + + String getTargetField() { + return targetField; + } + + Set<Property> getProperties() { + return properties; + } + + UserAgentParser getUaParser() { + return parser; + } + + public static final class Factory implements Processor.Factory { + + private final Map<String, UserAgentParser> userAgentParsers; + + public Factory(Map<String, UserAgentParser> userAgentParsers) { + this.userAgentParsers = userAgentParsers; + } + + @Override + public UserAgentProcessor create(String processorTag, Map<String, Object> config) throws Exception { + String field = readStringProperty(TYPE, processorTag, config, "field"); + String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "useragent"); + String regexFilename = readStringProperty(TYPE, processorTag, config, "regex_file", IngestUserAgentPlugin.DEFAULT_PARSER_NAME); + List<String> propertyNames = readOptionalList(TYPE, processorTag, config, "properties"); + + UserAgentParser parser = userAgentParsers.get(regexFilename); + if (parser == null) { + throw newConfigurationException(TYPE, processorTag, + "regex_file", "regex file [" + regexFilename + "] doesn't exist (has to exist at node startup)"); + } + + final Set<Property> properties; + if (propertyNames != null) { + properties = EnumSet.noneOf(Property.class); + for (String fieldName : propertyNames) { + try { + properties.add(Property.parseProperty(fieldName)); + } catch (IllegalArgumentException e) { + throw newConfigurationException(TYPE, processorTag, "properties", e.getMessage()); + } + } + } else { + properties = EnumSet.allOf(Property.class); + } + + return new UserAgentProcessor(processorTag, field, targetField, parser, properties); + } + } + + enum Property { + + NAME, MAJOR, MINOR, PATCH, OS, OS_NAME, OS_MAJOR, OS_MINOR, DEVICE, BUILD; + + public static Property parseProperty(String propertyName) { + try { + return valueOf(propertyName.toUpperCase(Locale.ROOT)); + } + catch (IllegalArgumentException e) { + throw new IllegalArgumentException("illegal property value [" + propertyName + "]. valid values are " + + Arrays.toString(EnumSet.allOf(Property.class).toArray())); + } + } + } +} diff --git a/plugins/ingest-useragent/src/main/resources/regexes.yaml b/plugins/ingest-useragent/src/main/resources/regexes.yaml new file mode 100644 index 00000000000..cc28842308c --- /dev/null +++ b/plugins/ingest-useragent/src/main/resources/regexes.yaml @@ -0,0 +1,4815 @@ +# Apache License, Version 2.0 +# =========================== +# +# Copyright 2009 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. + +user_agent_parsers: + #### SPECIAL CASES TOP #### + + # @note: iOS / OSX Applications + - regex: '(CFNetwork)(?:/(\d+)\.(\d+)\.?(\d+)?)?' + family_replacement: 'CFNetwork' + + # Pingdom + - regex: '(Pingdom.com_bot_version_)(\d+)\.(\d+)' + family_replacement: 'PingdomBot' + + # Facebook + - regex: '(facebookexternalhit)/(\d+)\.(\d+)' + family_replacement: 'FacebookBot' + + # Google Plus + - regex: 'Google.*/\+/web/snippet' + family_replacement: 'GooglePlusBot' + + # Twitter + - regex: '(Twitterbot)/(\d+)\.(\d+)' + family_replacement: 'TwitterBot' + + # Bots Pattern '/name-0.0' + - regex: '/((?:Ant-)?Nutch|[A-z]+[Bb]ot|[A-z]+[Ss]pider|Axtaris|fetchurl|Isara|ShopSalad|Tailsweep)[ \-](\d+)(?:\.(\d+)(?:\.(\d+))?)?' + # Bots Pattern 'name/0.0' + - regex: '(008|Altresium|Argus|BaiduMobaider|BoardReader|DNSGroup|DataparkSearch|EDI|Goodzer|Grub|INGRID|Infohelfer|LinkedInBot|LOOQ|Nutch|PathDefender|Peew|PostPost|Steeler|Twitterbot|VSE|WebCrunch|WebZIP|Y!J-BR[A-Z]|YahooSeeker|envolk|sproose|wminer)/(\d+)(?:\.(\d+)(?:\.(\d+))?)?' + + # MSIECrawler + - regex: '(MSIE) (\d+)\.(\d+)([a-z]\d?)?;.* MSIECrawler' + family_replacement: 'MSIECrawler' + + # Downloader ... + - regex: '(Google-HTTP-Java-Client|Apache-HttpClient|http%20client|Python-urllib|HttpMonitor|TLSProber|WinHTTP|JNLP)(?:[ /](\d+)(?:\.(\d+)(?:\.(\d+))?)?)?' + + # Bots + - regex: '(1470\.net crawler|50\.nu|8bo Crawler Bot|Aboundex|Accoona-[A-z]+-Agent|AdsBot-Google(?:-[a-z]+)?|altavista|AppEngine-Google|archive.*?\.org_bot|archiver|Ask Jeeves|[Bb]ai[Dd]u[Ss]pider(?:-[A-Za-z]+)*|bingbot|BingPreview|blitzbot|BlogBridge|BoardReader(?: [A-Za-z]+)*|boitho.com-dc|BotSeer|\b\w*favicon\w*\b|\bYeti(?:-[a-z]+)?|Catchpoint bot|[Cc]harlotte|Checklinks|clumboot|Comodo HTTP\(S\) Crawler|Comodo-Webinspector-Crawler|ConveraCrawler|CRAWL-E|CrawlConvera|Daumoa(?:-feedfetcher)?|Feed Seeker Bot|findlinks|Flamingo_SearchEngine|FollowSite Bot|furlbot|Genieo|gigabot|GomezAgent|gonzo1|(?:[a-zA-Z]+-)?Googlebot(?:-[a-zA-Z]+)?|Google SketchUp|grub-client|gsa-crawler|heritrix|HiddenMarket|holmes|HooWWWer|htdig|ia_archiver|ICC-Crawler|Icarus6j|ichiro(?:/mobile)?|IconSurf|IlTrovatore(?:-Setaccio)?|InfuzApp|Innovazion Crawler|InternetArchive|IP2[a-z]+Bot|jbot\b|KaloogaBot|Kraken|Kurzor|larbin|LEIA|LesnikBot|Linguee Bot|LinkAider|LinkedInBot|Lite Bot|Llaut|lycos|Mail\.RU_Bot|masidani_bot|Mediapartners-Google|Microsoft .*? Bot|mogimogi|mozDex|MJ12bot|msnbot(?:-media *)?|msrbot|netresearch|Netvibes|NewsGator[^/]*|^NING|Nutch[^/]*|Nymesis|ObjectsSearch|Orbiter|OOZBOT|PagePeeker|PagesInventory|PaxleFramework|Peeplo Screenshot Bot|PlantyNet_WebRobot|Pompos|Read%20Later|Reaper|RedCarpet|Retreiver|Riddler|Rival IQ|scooter|Scrapy|Scrubby|searchsight|seekbot|semanticdiscovery|Simpy|SimplePie|SEOstats|SimpleRSS|SiteCon|Slurp|snappy|Speedy Spider|Squrl Java|TheUsefulbot|ThumbShotsBot|Thumbshots\.ru|TwitterBot|URL2PNG|Vagabondo|VoilaBot|^vortex|Votay bot|^voyager|WASALive.Bot|Web-sniffer|WebThumb|WeSEE:[A-z]+|WhatWeb|WIRE|WordPress|Wotbox|www\.almaden\.ibm\.com|Xenu(?:.s)? Link Sleuth|Xerka [A-z]+Bot|yacy(?:bot)?|Yahoo[a-z]*Seeker|Yahoo! Slurp|Yandex\w+|YodaoBot(?:-[A-z]+)?|YottaaMonitor|Yowedo|^Zao|^Zao-Crawler|ZeBot_www\.ze\.bz|ZooShot|ZyBorg)(?:[ /]v?(\d+)(?:\.(\d+)(?:\.(\d+))?)?)?' + + # Bots General matcher 'name/0.0' + - regex: '(?:\/[A-Za-z0-9\.]+)? 
*([A-Za-z0-9 \-_\!\[\]:]*(?:[Aa]rchiver|[Ii]ndexer|[Ss]craper|[Bb]ot|[Ss]pider|[Cc]rawl[a-z]*))/(\d+)(?:\.(\d+)(?:\.(\d+))?)?' + # Bots General matcher 'name 0.0' + - regex: '(?:\/[A-Za-z0-9\.]+)? *([A-Za-z0-9 _\!\[\]:]*(?:[Aa]rchiver|[Ii]ndexer|[Ss]craper|[Bb]ot|[Ss]pider|[Cc]rawl[a-z]*)) (\d+)(?:\.(\d+)(?:\.(\d+))?)?' + # Bots containing spider|scrape|bot(but not CUBOT)|Crawl + - regex: '((?:[A-z0-9]+|[A-z\-]+ ?)?(?: the )?(?:[Ss][Pp][Ii][Dd][Ee][Rr]|[Ss]crape|[A-Za-z0-9-]*(?:[^C][^Uu])[Bb]ot|[Cc][Rr][Aa][Ww][Ll])[A-z0-9]*)(?:(?:[ /]| v)(\d+)(?:\.(\d+)(?:\.(\d+))?)?)?' + + # HbbTV standard defines what features the browser should understand. + # but it's like targeting "HTML5 browsers", effective browser support depends on the model + # See os_parsers if you want to target a specific TV + - regex: '(HbbTV)/(\d+)\.(\d+)\.(\d+) \(' + + # must go before Firefox to catch Chimera/SeaMonkey/Camino + - regex: '(Chimera|SeaMonkey|Camino)/(\d+)\.(\d+)\.?([ab]?\d+[a-z]*)?' + + # Social Networks + # Facebook + - regex: '\[FB.*;(FBAV)/(\d+)(?:\.(\d+)(?:\.(\d)+)?)?' + family_replacement: 'Facebook' + # Pinterest + - regex: '\[(Pinterest)/[^\]]+\]' + - regex: '(Pinterest)(?: for Android(?: Tablet)?)?/(\d+)(?:\.(\d+)(?:\.(\d)+)?)?' + + # Firefox + - regex: '(Pale[Mm]oon)/(\d+)\.(\d+)\.?(\d+)?' + family_replacement: 'Pale Moon (Firefox Variant)' + - regex: '(Fennec)/(\d+)\.(\d+)\.?([ab]?\d+[a-z]*)' + family_replacement: 'Firefox Mobile' + - regex: '(Fennec)/(\d+)\.(\d+)(pre)' + family_replacement: 'Firefox Mobile' + - regex: '(Fennec)/(\d+)\.(\d+)' + family_replacement: 'Firefox Mobile' + - regex: '(?:Mobile|Tablet);.*(Firefox)/(\d+)\.(\d+)' + family_replacement: 'Firefox Mobile' + - regex: '(Namoroka|Shiretoko|Minefield)/(\d+)\.(\d+)\.(\d+(?:pre)?)' + family_replacement: 'Firefox ($1)' + - regex: '(Firefox)/(\d+)\.(\d+)(a\d+[a-z]*)' + family_replacement: 'Firefox Alpha' + - regex: '(Firefox)/(\d+)\.(\d+)(b\d+[a-z]*)' + family_replacement: 'Firefox Beta' + - regex: '(Firefox)-(?:\d+\.\d+)?/(\d+)\.(\d+)(a\d+[a-z]*)' + family_replacement: 'Firefox Alpha' + - regex: '(Firefox)-(?:\d+\.\d+)?/(\d+)\.(\d+)(b\d+[a-z]*)' + family_replacement: 'Firefox Beta' + - regex: '(Namoroka|Shiretoko|Minefield)/(\d+)\.(\d+)([ab]\d+[a-z]*)?' + family_replacement: 'Firefox ($1)' + - regex: '(Firefox).*Tablet browser (\d+)\.(\d+)\.(\d+)' + family_replacement: 'MicroB' + - regex: '(MozillaDeveloperPreview)/(\d+)\.(\d+)([ab]\d+[a-z]*)?' + - regex: '(FxiOS)/(\d+)\.(\d+)(\.(\d+))?(\.(\d+))?' + family_replacement: 'Firefox iOS' + + # e.g.: Flock/2.0b2 + - regex: '(Flock)/(\d+)\.(\d+)(b\d+?)' + + # RockMelt + - regex: '(RockMelt)/(\d+)\.(\d+)\.(\d+)' + + # e.g.: Fennec/0.9pre + - regex: '(Navigator)/(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Netscape' + + - regex: '(Navigator)/(\d+)\.(\d+)([ab]\d+)' + family_replacement: 'Netscape' + + - regex: '(Netscape6)/(\d+)\.(\d+)\.?([ab]?\d+)?' + family_replacement: 'Netscape' + + - regex: '(MyIBrow)/(\d+)\.(\d+)' + family_replacement: 'My Internet Browser' + + # Opera will stop at 9.80 and hide the real version in the Version string. + # see: http://dev.opera.com/articles/view/opera-ua-string-changes/ + - regex: '(Opera Tablet).*Version/(\d+)\.(\d+)(?:\.(\d+))?' + - regex: '(Opera Mini)(?:/att)?/?(\d+)?(?:\.(\d+))?(?:\.(\d+))?' 
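# Worked example (editor's illustration, not part of the upstream regexes.yaml):
# the UA string 'Opera/9.80 (J2ME/MIDP; Opera Mini/7.1.32052/29.3417; U; en) Presto/2.8.119 Version/11.10'
# is caught by the Opera Mini entry above: capture group 1 yields the family
# 'Opera Mini', and groups 2-4 yield major '7', minor '1', patch '32052'.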
+ - regex: '(Opera)/.+Opera Mobi.+Version/(\d+)\.(\d+)' + family_replacement: 'Opera Mobile' + - regex: '(Opera)/(\d+)\.(\d+).+Opera Mobi' + family_replacement: 'Opera Mobile' + - regex: 'Opera Mobi.+(Opera)(?:/|\s+)(\d+)\.(\d+)' + family_replacement: 'Opera Mobile' + - regex: 'Opera Mobi' + family_replacement: 'Opera Mobile' + - regex: '(Opera)/9.80.*Version/(\d+)\.(\d+)(?:\.(\d+))?' + + # Opera 14 for Android uses a WebKit render engine. + - regex: '(?:Mobile Safari).*(OPR)/(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Opera Mobile' + + # Opera >=15 for Desktop is similar to Chrome but includes an "OPR" Version string. + - regex: '(?:Chrome).*(OPR)/(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Opera' + + # Opera Coast + - regex: '(Coast)/(\d+).(\d+).(\d+)' + family_replacement: 'Opera Coast' + + # Opera Mini for iOS (from version 8.0.0) + - regex: '(OPiOS)/(\d+).(\d+).(\d+)' + family_replacement: 'Opera Mini' + + # Palm WebOS looks a lot like Safari. + - regex: '(hpw|web)OS/(\d+)\.(\d+)(?:\.(\d+))?' + family_replacement: 'webOS Browser' + + # LuaKit has no version info. + # http://luakit.org/projects/luakit/ + - regex: '(luakit)' + family_replacement: 'LuaKit' + + # Snowshoe + - regex: '(Snowshoe)/(\d+)\.(\d+).(\d+)' + + # Lightning (for Thunderbird) + # http://www.mozilla.org/projects/calendar/lightning/ + - regex: '(Lightning)/(\d+)\.(\d+)\.?((?:[ab]?\d+[a-z]*)|(?:\d*))' + + # Swiftfox + - regex: '(Firefox)/(\d+)\.(\d+)\.(\d+(?:pre)?) \(Swiftfox\)' + family_replacement: 'Swiftfox' + - regex: '(Firefox)/(\d+)\.(\d+)([ab]\d+[a-z]*)? \(Swiftfox\)' + family_replacement: 'Swiftfox' + + # Rekonq + - regex: '(rekonq)/(\d+)\.(\d+)\.?(\d+)? Safari' + family_replacement: 'Rekonq' + - regex: 'rekonq' + family_replacement: 'Rekonq' + + # Conkeror lowercase/uppercase + # http://conkeror.org/ + - regex: '(conkeror|Conkeror)/(\d+)\.(\d+)\.?(\d+)?' + family_replacement: 'Conkeror' + + # catches lower case konqueror + - regex: '(konqueror)/(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Konqueror' + + - regex: '(WeTab)-Browser' + + - regex: '(Comodo_Dragon)/(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Comodo Dragon' + + - regex: '(Symphony) (\d+).(\d+)' + + - regex: '(Minimo)' + + - regex: 'PLAYSTATION 3.+WebKit' + family_replacement: 'NetFront NX' + - regex: 'PLAYSTATION 3' + family_replacement: 'NetFront' + - regex: '(PlayStation Portable)' + family_replacement: 'NetFront' + - regex: '(PlayStation Vita)' + family_replacement: 'NetFront NX' + + - regex: 'AppleWebKit.+ (NX)/(\d+)\.(\d+)\.(\d+)' + family_replacement: 'NetFront NX' + - regex: '(Nintendo 3DS)' + family_replacement: 'NetFront NX' + + # Amazon Silk, should go before Safari and Chrome Mobile + - regex: '(Silk)/(\d+)\.(\d+)(?:\.([0-9\-]+))?' + family_replacement: 'Amazon Silk' + + + # @ref: http://www.puffinbrowser.com + - regex: '(Puffin)/(\d+)\.(\d+)(?:\.(\d+))?' + + # Edge Mobile + - regex: 'Windows Phone .*(Edge)/(\d+)\.(\d+)' + family_replacement: 'Edge Mobile' + + # Samsung Internet (based on Chrome, but lacking some features) + - regex: '(SamsungBrowser)/(\d+)\.(\d+)' + family_replacement: 'Samsung Internet' + + # Chrome Mobile + - regex: '(CrMo)/(\d+)\.(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Chrome Mobile' + - regex: '(CriOS)/(\d+)\.(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Chrome Mobile iOS' + - regex: '(Chrome)/(\d+)\.(\d+)\.(\d+)\.(\d+) Mobile' + family_replacement: 'Chrome Mobile' + + # Chrome Frame must come before MSIE. 
+ - regex: '(chromeframe)/(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Chrome Frame' + + # UC Browser + - regex: '(UCBrowser)[ /](\d+)\.(\d+)\.(\d+)' + family_replacement: 'UC Browser' + - regex: '(UC Browser)[ /](\d+)\.(\d+)\.(\d+)' + - regex: '(UC Browser|UCBrowser|UCWEB)(\d+)\.(\d+)\.(\d+)' + family_replacement: 'UC Browser' + + # Tizen Browser (second case included in browser/major.minor regex) + - regex: '(SLP Browser)/(\d+)\.(\d+)' + family_replacement: 'Tizen Browser' + + # Sogou Explorer 2.X + - regex: '(SE 2\.X) MetaSr (\d+)\.(\d+)' + family_replacement: 'Sogou Explorer' + + # Baidu Browsers (desktop spoofs chrome & IE, explorer is mobile) + - regex: '(baidubrowser)[/\s](\d+)' + family_replacement: 'Baidu Browser' + - regex: '(FlyFlow)/(\d+)\.(\d+)' + family_replacement: 'Baidu Explorer' + + # QQ Browsers + - regex: '(MQQBrowser/Mini)(?:(\d+)(?:\.(\d+)(?:\.(\d+))?)?)?' + family_replacement: 'QQ Browser Mini' + - regex: '(MQQBrowser)(?:/(\d+)(?:\.(\d+)(?:\.(\d+))?)?)?' + family_replacement: 'QQ Browser Mobile' + - regex: '(QQBrowser)(?:/(\d+)(?:\.(\d+)\.(\d+)(?:\.(\d+))?)?)?' + family_replacement: 'QQ Browser' + + # Rackspace Monitoring + - regex: '(Rackspace Monitoring)/(\d+)\.(\d+)' + family_replacement: 'RackspaceBot' + + # PyAMF + - regex: '(PyAMF)/(\d+)\.(\d+)\.(\d+)' + + # Yandex Browser + - regex: '(YaBrowser)/(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Yandex Browser' + + # Mail.ru Amigo/Internet Browser (Chromium-based) + - regex: '(Chrome)/(\d+)\.(\d+)\.(\d+).* MRCHROME' + family_replacement: 'Mail.ru Chromium Browser' + + # AOL Browser (IE-based) + - regex: '(AOL) (\d+)\.(\d+); AOLBuild (\d+)' + + #### END SPECIAL CASES TOP #### + + #### MAIN CASES - this catches > 50% of all browsers #### + + # Browser/major_version.minor_version.beta_version + - regex: '(AdobeAIR|FireWeb|Jasmine|ANTGalio|Midori|Fresco|Lobo|PaleMoon|Maxthon|Lynx|OmniWeb|Dillo|Camino|Demeter|Fluid|Fennec|Epiphany|Shiira|Sunrise|Spotify|Flock|Netscape|Lunascape|WebPilot|NetFront|Netfront|Konqueror|SeaMonkey|Kazehakase|Vienna|Iceape|Iceweasel|IceWeasel|Iron|K-Meleon|Sleipnir|Galeon|GranParadiso|Opera Mini|iCab|NetNewsWire|ThunderBrowse|Iris|UP\.Browser|Bunjalloo|Google Earth|Raven for Mac|Openwave)/(\d+)\.(\d+)\.(\d+)' + + # Outlook 2007 + - regex: 'Microsoft Office Outlook 12\.\d+\.\d+|MSOffice 12' + family_replacement: 'Outlook' + v1_replacement: '2007' + + # Outlook 2010 + - regex: 'Microsoft Outlook 14\.\d+\.\d+|MSOffice 14' + family_replacement: 'Outlook' + v1_replacement: '2010' + + # Outlook 2013 + - regex: 'Microsoft Outlook 15\.\d+\.\d+' + family_replacement: 'Outlook' + v1_replacement: '2013' + + # Outlook 2016 + - regex: 'Microsoft Outlook (?:Mail )?16\.\d+\.\d+' + family_replacement: 'Outlook' + v1_replacement: '2016' + + # Windows Live Mail + - regex: 'Outlook-Express\/7\.0.*' + family_replacement: 'Windows Live Mail' + + # Apple Air Mail + - regex: '(Airmail) (\d+)\.(\d+)(?:\.(\d+))?' + + # Thunderbird + - regex: '(Thunderbird)/(\d+)\.(\d+)\.(\d+(?:pre)?)' + family_replacement: 'Thunderbird' + + # Vivaldi uses "Vivaldi" + - regex: '(Vivaldi)/(\d+)\.(\d+)\.(\d+)' + + # Edge/major_version.minor_version + - regex: '(Edge)/(\d+)\.(\d+)' + + # Brave Browser https://brave.com/ + - regex: '(brave)/(\d+)\.(\d+)\.(\d+) Chrome' + family_replacement: 'Brave' + + # Chrome/Chromium/major_version.minor_version.beta_version + - regex: '(Chromium|Chrome)/(\d+)\.(\d+)\.(\d+)' + + # Dolphin Browser + # @ref: http://www.dolphin.com + - regex: '\b(Dolphin)(?: |HDCN/|/INT\-)(\d+)\.(\d+)\.?(\d+)?' 
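# Worked example (editor's illustration, not part of the upstream regexes.yaml):
# for the Firefox entry earlier in this section,
# '(Namoroka|Shiretoko|Minefield)/(\d+)\.(\d+)\.(\d+(?:pre)?)' with
# family_replacement 'Firefox ($1)', a UA containing 'Minefield/3.1.2' yields the
# family 'Firefox (Minefield)': UserAgentSubpattern.match substitutes capture
# group 1 for the literal '$1' in the replacement, while groups 2-4 supply
# major '3', minor '1', patch '2'.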
+ + # Browser/major_version.minor_version + - regex: '(bingbot|Bolt|Jasmine|IceCat|Skyfire|Midori|Maxthon|Lynx|Arora|IBrowse|Dillo|Camino|Shiira|Fennec|Phoenix|Chrome|Flock|Netscape|Lunascape|Epiphany|WebPilot|Opera Mini|Opera|NetFront|Netfront|Konqueror|Googlebot|SeaMonkey|Kazehakase|Vienna|Iceape|Iceweasel|IceWeasel|Iron|K-Meleon|Sleipnir|Galeon|GranParadiso|iCab|iTunes|MacAppStore|NetNewsWire|Space Bison|Stainless|Orca|Dolfin|BOLT|Minimo|Tizen Browser|Polaris|Abrowser|Planetweb|ICE Browser|mDolphin|qutebrowser|Otter|QupZilla)/(\d+)\.(\d+)\.?(\d+)?' + + # Chrome/Chromium/major_version.minor_version + - regex: '(Chromium|Chrome)/(\d+)\.(\d+)' + + ########## + # IE Mobile needs to happen before Android to catch cases such as: + # Mozilla/5.0 (Mobile; Windows Phone 8.1; Android 4.0; ARM; Trident/7.0; Touch; rv:11.0; IEMobile/11.0; NOKIA; Lumia 920)... + # Mozilla/5.0 (Mobile; Windows Phone 8.1; Android 4.0; ARM; Trident/7.0; Touch; rv:11.0; IEMobile/11.0; NOKIA; Lumia 920; ANZ821)... + # Mozilla/5.0 (Mobile; Windows Phone 8.1; Android 4.0; ARM; Trident/7.0; Touch; rv:11.0; IEMobile/11.0; NOKIA; Lumia 920; Orange)... + # Mozilla/5.0 (Mobile; Windows Phone 8.1; Android 4.0; ARM; Trident/7.0; Touch; rv:11.0; IEMobile/11.0; NOKIA; Lumia 920; Vodafone)... + ########## + + # IE Mobile + - regex: '(IEMobile)[ /](\d+)\.(\d+)' + family_replacement: 'IE Mobile' + + # Browser major_version.minor_version.beta_version (space instead of slash) + - regex: '(iRider|Crazy Browser|SkipStone|iCab|Lunascape|Sleipnir|Maemo Browser) (\d+)\.(\d+)\.(\d+)' + # Browser major_version.minor_version (space instead of slash) + - regex: '(iCab|Lunascape|Opera|Android|Jasmine|Polaris) (\d+)\.(\d+)\.?(\d+)?' + + # Kindle WebKit + - regex: '(Kindle)/(\d+)\.(\d+)' + + # weird android UAs + - regex: '(Android) Donut' + v1_replacement: '1' + v2_replacement: '2' + + - regex: '(Android) Eclair' + v1_replacement: '2' + v2_replacement: '1' + + - regex: '(Android) Froyo' + v1_replacement: '2' + v2_replacement: '2' + + - regex: '(Android) Gingerbread' + v1_replacement: '2' + v2_replacement: '3' + + - regex: '(Android) Honeycomb' + v1_replacement: '3' + + # desktop mode + # http://www.anandtech.com/show/3982/windows-phone-7-review + - regex: '(MSIE) (\d+)\.(\d+).*XBLWP7' + family_replacement: 'IE Large Screen' + + #### END MAIN CASES #### + + #### SPECIAL CASES #### + - regex: '(Obigo)InternetBrowser' + - regex: '(Obigo)\-Browser' + - regex: '(Obigo|OBIGO)[^\d]*(\d+)(?:.(\d+))?' + family_replacement: 'Obigo' + + - regex: '(MAXTHON|Maxthon) (\d+)\.(\d+)' + family_replacement: 'Maxthon' + - regex: '(Maxthon|MyIE2|Uzbl|Shiira)' + v1_replacement: '0' + + - regex: '(BrowseX) \((\d+)\.(\d+)\.(\d+)' + + - regex: '(NCSA_Mosaic)/(\d+)\.(\d+)' + family_replacement: 'NCSA Mosaic' + + # Polaris/d.d is above + - regex: '(POLARIS)/(\d+)\.(\d+)' + family_replacement: 'Polaris' + - regex: '(Embider)/(\d+)\.(\d+)' + family_replacement: 'Polaris' + + - regex: '(BonEcho)/(\d+)\.(\d+)\.?([ab]?\d+)?' + family_replacement: 'Bon Echo' + + # @note: iOS / OSX Applications + - regex: '(iPod|iPhone|iPad).+Version/(\d+)\.(\d+)(?:\.(\d+))?.* Safari' + family_replacement: 'Mobile Safari' + - regex: '(iPod|iPhone|iPad).+Version/(\d+)\.(\d+)(?:\.(\d+))?' 
+ family_replacement: 'Mobile Safari UI/WKWebView' + - regex: '(iPod|iPhone|iPad);.*CPU.*OS (\d+)_(\d+)(?:_(\d+))?.*Mobile.* Safari' + family_replacement: 'Mobile Safari' + - regex: '(iPod|iPhone|iPad);.*CPU.*OS (\d+)_(\d+)(?:_(\d+))?.*Mobile' + family_replacement: 'Mobile Safari UI/WKWebView' + - regex: '(iPod|iPhone|iPad).* Safari' + family_replacement: 'Mobile Safari' + - regex: '(iPod|iPhone|iPad)' + family_replacement: 'Mobile Safari UI/WKWebView' + + - regex: '(AvantGo) (\d+).(\d+)' + + - regex: '(OneBrowser)/(\d+).(\d+)' + family_replacement: 'ONE Browser' + + - regex: '(Avant)' + v1_replacement: '1' + + # This is the Tesla Model S (see similar entry in device parsers) + - regex: '(QtCarBrowser)' + v1_replacement: '1' + + - regex: '^(iBrowser/Mini)(\d+).(\d+)' + family_replacement: 'iBrowser Mini' + - regex: '^(iBrowser|iRAPP)/(\d+).(\d+)' + + # nokia browsers + # based on: http://www.developer.nokia.com/Community/Wiki/User-Agent_headers_for_Nokia_devices + - regex: '^(Nokia)' + family_replacement: 'Nokia Services (WAP) Browser' + - regex: '(NokiaBrowser)/(\d+)\.(\d+).(\d+)\.(\d+)' + family_replacement: 'Nokia Browser' + - regex: '(NokiaBrowser)/(\d+)\.(\d+).(\d+)' + family_replacement: 'Nokia Browser' + - regex: '(NokiaBrowser)/(\d+)\.(\d+)' + family_replacement: 'Nokia Browser' + - regex: '(BrowserNG)/(\d+)\.(\d+).(\d+)' + family_replacement: 'Nokia Browser' + - regex: '(Series60)/5\.0' + family_replacement: 'Nokia Browser' + v1_replacement: '7' + v2_replacement: '0' + - regex: '(Series60)/(\d+)\.(\d+)' + family_replacement: 'Nokia OSS Browser' + - regex: '(S40OviBrowser)/(\d+)\.(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Ovi Browser' + - regex: '(Nokia)[EN]?(\d+)' + + # BlackBerry devices + - regex: '(PlayBook).+RIM Tablet OS (\d+)\.(\d+)\.(\d+)' + family_replacement: 'BlackBerry WebKit' + - regex: '(Black[bB]erry|BB10).+Version/(\d+)\.(\d+)\.(\d+)' + family_replacement: 'BlackBerry WebKit' + - regex: '(Black[bB]erry)\s?(\d+)' + family_replacement: 'BlackBerry' + + - regex: '(OmniWeb)/v(\d+)\.(\d+)' + + - regex: '(Blazer)/(\d+)\.(\d+)' + family_replacement: 'Palm Blazer' + + - regex: '(Pre)/(\d+)\.(\d+)' + family_replacement: 'Palm Pre' + + # fork of Links + - regex: '(ELinks)/(\d+)\.(\d+)' + - regex: '(ELinks) \((\d+)\.(\d+)' + - regex: '(Links) \((\d+)\.(\d+)' + + - regex: '(QtWeb) Internet Browser/(\d+)\.(\d+)' + + #- regex: '\(iPad;.+(Version)/(\d+)\.(\d+)(?:\.(\d+))?.*Safari/' + # family_replacement: 'iPad' + + # Phantomjs, should go before Safari + - regex: '(PhantomJS)/(\d+)\.(\d+)\.(\d+)' + + # WebKit Nightly + - regex: '(AppleWebKit)/(\d+)\.?(\d+)?\+ .* Safari' + family_replacement: 'WebKit Nightly' + + # Safari + - regex: '(Version)/(\d+)\.(\d+)(?:\.(\d+))?.*Safari/' + family_replacement: 'Safari' + # Safari didn't provide "Version/d.d.d" prior to 3.0 + - regex: '(Safari)/\d+' + + - regex: '(OLPC)/Update(\d+)\.(\d+)' + + - regex: '(OLPC)/Update()\.(\d+)' + v1_replacement: '0' + + - regex: '(SEMC\-Browser)/(\d+)\.(\d+)' + + - regex: '(Teleca)' + family_replacement: 'Teleca Browser' + + - regex: '(Phantom)/V(\d+)\.(\d+)' + family_replacement: 'Phantom Browser' + + - regex: 'Trident(.*)rv.(\d+)\.(\d+)' + family_replacement: 'IE' + + # Espial + - regex: '(Espial)/(\d+)(?:\.(\d+))?(?:\.(\d+))?' + + # Apple Mail + + # apple mail - not directly detectable, have it after Safari stuff + - regex: '(AppleWebKit)/(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Apple Mail' + + # AFTER THE EDGE CASES ABOVE! 
+ # AFTER IE11 + # BEFORE all other IE + - regex: '(Firefox)/(\d+)\.(\d+)\.(\d+)' + - regex: '(Firefox)/(\d+)\.(\d+)(pre|[ab]\d+[a-z]*)?' + + - regex: '([MS]?IE) (\d+)\.(\d+)' + family_replacement: 'IE' + + - regex: '(python-requests)/(\d+)\.(\d+)' + family_replacement: 'Python Requests' + + - regex: '(Java)[/ ]{0,1}\d+\.(\d+)\.(\d+)[_-]*([a-zA-Z0-9]+)*' + + # Roku Digital-Video-Players https://www.roku.com/ + - regex: '^(Roku)/DVP-(\d+)\.(\d+)' + +os_parsers: + ########## + # HbbTV vendors + ########## + + # start with the easy one: Panasonic seems consistent across years; hope it will continue + #HbbTV/1.1.1 (;Panasonic;VIERA 2011;f.532;0071-0802 2000-0000;) + #HbbTV/1.1.1 (;Panasonic;VIERA 2012;1.261;0071-3103 2000-0000;) + #HbbTV/1.2.1 (;Panasonic;VIERA 2013;3.672;4101-0003 0002-0000;) + #- regex: 'HbbTV/\d+\.\d+\.\d+ \(;(Panasonic);VIERA ([0-9]{4});' + + # Sony is consistent too, but does not place the year like the others + # Opera/9.80 (Linux armv7l; HbbTV/1.1.1 (; Sony; KDL32W650A; PKG3.211EUA; 2013;); ) Presto/2.12.362 Version/12.11 + # Opera/9.80 (Linux mips; U; HbbTV/1.1.1 (; Sony; KDL40HX751; PKG1.902EUA; 2012;);; en) Presto/2.10.250 Version/11.60 + # Opera/9.80 (Linux mips; U; HbbTV/1.1.1 (; Sony; KDL22EX320; PKG4.017EUA; 2011;);; en) Presto/2.7.61 Version/11.00 + #- regex: 'HbbTV/\d+\.\d+\.\d+ \(; (Sony);.*;.*; ([0-9]{4});\)' + + + # LG is consistent too, but we need to add the model year manually + #Mozilla/5.0 (Unknown; Linux armv7l) AppleWebKit/537.1+ (KHTML, like Gecko) Safari/537.1+ HbbTV/1.1.1 ( ;LGE ;NetCast 4.0 ;03.20.30 ;1.0M ;) + #Mozilla/5.0 (DirectFB; Linux armv7l) AppleWebKit/534.26+ (KHTML, like Gecko) Version/5.0 Safari/534.26+ HbbTV/1.1.1 ( ;LGE ;NetCast 3.0 ;1.0 ;1.0M ;) + - regex: 'HbbTV/\d+\.\d+\.\d+ \( ;(LG)E ;NetCast 4.0' + os_v1_replacement: '2013' + - regex: 'HbbTV/\d+\.\d+\.\d+ \( ;(LG)E ;NetCast 3.0' + os_v1_replacement: '2012' + + # Samsung is on its way to normalizing its user-agents + # HbbTV/1.1.1 (;Samsung;SmartTV2013;T-FXPDEUC-1102.2;;) WebKit + # HbbTV/1.1.1 (;Samsung;SmartTV2013;T-MST12DEUC-1102.1;;) WebKit + # HbbTV/1.1.1 (;Samsung;SmartTV2012;;;) WebKit + # HbbTV/1.1.1 (;;;;;) Maple_2011 + - regex: 'HbbTV/1.1.1 \(;;;;;\) Maple_2011' + os_replacement: 'Samsung' + os_v1_replacement: '2011' + # handle the two 2013 models + - regex: 'HbbTV/\d+\.\d+\.\d+ \(;(Samsung);SmartTV([0-9]{4});.*FXPDEUC' + os_v2_replacement: 'UE40F7000' + - regex: 'HbbTV/\d+\.\d+\.\d+ \(;(Samsung);SmartTV([0-9]{4});.*MST12DEUC' + os_v2_replacement: 'UE32F4500' + # generic Samsung (works starting in 2012) + #- regex: 'HbbTV/\d+\.\d+\.\d+ \(;(Samsung);SmartTV([0-9]{4});' + + # Philips: no way found other than a manual mapping + # Opera/9.80 (Linux mips; U; HbbTV/1.1.1 (; Philips; ; ; ; ) CE-HTML/1.0 NETTV/4.1.3 PHILIPSTV/1.1.1; en) Presto/2.10.250 Version/11.60 + # Opera/9.80 (Linux mips ; U; HbbTV/1.1.1 (; Philips; ; ; ; ) CE-HTML/1.0 NETTV/3.2.1; en) Presto/2.6.33 Version/10.70 + - regex: 'HbbTV/1.1.1 \(; (Philips);.*NETTV/4' + os_v1_replacement: '2013' + - regex: 'HbbTV/1.1.1 \(; (Philips);.*NETTV/3' + os_v1_replacement: '2012' + - regex: 'HbbTV/1.1.1 \(; (Philips);.*NETTV/2' + os_v1_replacement: '2011' + + # the HbbTV emulator developers use HbbTV/1.1.1 (;;;;;) firetv-firefox-plugin 1.1.20 + - regex: 'HbbTV/\d+\.\d+\.\d+.*(firetv)-firefox-plugin (\d+).(\d+).(\d+)' + os_replacement: 'FireHbbTV' + + # generic HbbTV, hoping to catch the manufacturer name (always after the 2nd comma) and the first string that looks like a 2011-2019 year + - regex: 'HbbTV/\d+\.\d+\.\d+ \(.*; ?([a-zA-Z]+) ?;.*(201[1-9]).*\)'
+ + ########## + # @note: Windows Phone needs to come before Windows NT 6.1 *and* before Android to catch cases such as: + # Mozilla/5.0 (Mobile; Windows Phone 8.1; Android 4.0; ARM; Trident/7.0; Touch; rv:11.0; IEMobile/11.0; NOKIA; Lumia 920)... + # Mozilla/5.0 (Mobile; Windows Phone 8.1; Android 4.0; ARM; Trident/7.0; Touch; rv:11.0; IEMobile/11.0; NOKIA; Lumia 920; ANZ821)... + # Mozilla/5.0 (Mobile; Windows Phone 8.1; Android 4.0; ARM; Trident/7.0; Touch; rv:11.0; IEMobile/11.0; NOKIA; Lumia 920; Orange)... + # Mozilla/5.0 (Mobile; Windows Phone 8.1; Android 4.0; ARM; Trident/7.0; Touch; rv:11.0; IEMobile/11.0; NOKIA; Lumia 920; Vodafone)... + ########## + + - regex: '(Windows Phone) (?:OS[ /])?(\d+)\.(\d+)' + + ########## + # Android + # can actually detect a rooted Android OS; do we care? + ########## + - regex: '(Android)[ \-/](\d+)\.(\d+)(?:[.\-]([a-z0-9]+))?' + + - regex: '(Android) Donut' + os_v1_replacement: '1' + os_v2_replacement: '2' + + - regex: '(Android) Eclair' + os_v1_replacement: '2' + os_v2_replacement: '1' + + - regex: '(Android) Froyo' + os_v1_replacement: '2' + os_v2_replacement: '2' + + - regex: '(Android) Gingerbread' + os_v1_replacement: '2' + os_v2_replacement: '3' + + - regex: '(Android) Honeycomb' + os_v1_replacement: '3' + + # UCWEB + - regex: '^UCWEB.*; (Adr) (\d+)\.(\d+)(?:[.\-]([a-z0-9]+))?;' + os_replacement: 'Android' + - regex: '^UCWEB.*; (iPad OS|iPh OS) (\d+)_(\d+)(?:_(\d+))?;' + os_replacement: 'iOS' + - regex: '^UCWEB.*; (wds) (\d+)\.(\d+)(?:\.(\d+))?;' + os_replacement: 'Windows Phone' + # JUC + - regex: '^(JUC).*; ?U; ?(?:Android)?(\d+)\.(\d+)(?:[\.\-]([a-z0-9]+))?' + os_replacement: 'Android' + + ########## + # Kindle Android + ########## + - regex: '(Silk-Accelerated=[a-z]{4,5})' + os_replacement: 'Android' + + ########## + # Windows + # http://en.wikipedia.org/wiki/Windows_NT#Releases + # possibility of false positives when different marketing names share the same NT kernel + # e.g. Windows Server 2003 and Windows XP + # lots of UA strings have Windows NT 4.1 !? + # (very) roughly ordered by frequency of occurrence of each regex (Win XP currently most frequent, etc.) + ########## + + # IE Mobile desktop mode + # spoofs NT 6.1; must come before Windows 7
+ - regex: '(XBLWP7)' + os_replacement: 'Windows Phone' + + # @note: This needs to come before Windows NT 6.1 + - regex: '(Windows ?Mobile)' + os_replacement: 'Windows Mobile' + + - regex: '(Windows (?:NT 5\.2|NT 5\.1))' + os_replacement: 'Windows XP' + + - regex: '(Windows NT 6\.1)' + os_replacement: 'Windows 7' + + - regex: '(Windows NT 6\.0)' + os_replacement: 'Windows Vista' + + - regex: '(Win 9x 4\.90)' + os_replacement: 'Windows ME' + + - regex: '(Windows 98|Windows XP|Windows ME|Windows 95|Windows CE|Windows 7|Windows NT 4\.0|Windows Vista|Windows 2000|Windows 3.1)' + + - regex: '(Windows NT 6\.2; ARM;)' + os_replacement: 'Windows RT' + - regex: '(Windows NT 6\.2)' + os_replacement: 'Windows 8' + + - regex: '(Windows NT 6\.3; ARM;)' + os_replacement: 'Windows RT 8.1' + - regex: '(Windows NT 6\.3)' + os_replacement: 'Windows 8.1' + + - regex: '(Windows NT 6\.4)' + os_replacement: 'Windows 10' + - regex: '(Windows NT 10\.0)' + os_replacement: 'Windows 10' + + - regex: '(Windows NT 5\.0)' + os_replacement: 'Windows 2000' + + - regex: '(WinNT4.0)' + os_replacement: 'Windows NT 4.0' + + - regex: '(Windows ?CE)' + os_replacement: 'Windows CE' + + - regex: 'Win ?(95|98|3.1|NT|ME|2000)' + os_replacement: 'Windows $1' + + - regex: 'Win16' + os_replacement: 'Windows 3.1' + + - regex: 'Win32' + os_replacement: 'Windows 95' + + ########## + # Tizen OS from Samsung + # spoofs Android, so it is pushed above + ########## + - regex: '(Tizen)/(\d+)\.(\d+)' + + ########## + # Mac OS + # @ref: http://en.wikipedia.org/wiki/Mac_OS_X#Versions + # @ref: http://www.puredarwin.org/curious/versions + ########## + - regex: '((?:Mac ?|; )OS X)[\s/](?:(\d+)[_.](\d+)(?:[_.](\d+))?|Mach-O)' + os_replacement: 'Mac OS X' + # Leopard + - regex: ' (Dar)(win)/(9).(\d+).*\((?:i386|x86_64|Power Macintosh)\)' + os_replacement: 'Mac OS X' + os_v1_replacement: '10' + os_v2_replacement: '5' + # Snow Leopard + - regex: ' (Dar)(win)/(10).(\d+).*\((?:i386|x86_64)\)' + os_replacement: 'Mac OS X' + os_v1_replacement: '10' + os_v2_replacement: '6' + # Lion + - regex: ' (Dar)(win)/(11).(\d+).*\((?:i386|x86_64)\)' + os_replacement: 'Mac OS X' + os_v1_replacement: '10' + os_v2_replacement: '7' + # Mountain Lion + - regex: ' (Dar)(win)/(12).(\d+).*\((?:i386|x86_64)\)' + os_replacement: 'Mac OS X' + os_v1_replacement: '10' + os_v2_replacement: '8' + # Mavericks + - regex: ' (Dar)(win)/(13).(\d+).*\((?:i386|x86_64)\)' + os_replacement: 'Mac OS X' + os_v1_replacement: '10' + os_v2_replacement: '9' + # Yosemite is Darwin/14.x but patch versions are inconsistent in the Darwin string; + # more accurately covered by CFNetwork regexes downstream + + # IE on Mac doesn't specify a version number + - regex: 'Mac_PowerPC' + os_replacement: 'Mac OS' + + # builds before Tiger don't seem to specify a version? + + # iOS devices spoof (Mac OS X), so include the Intel/PPC prefixes + - regex: '(?:PPC|Intel) (Mac OS X)' + + ########## + # iOS + # http://en.wikipedia.org/wiki/IOS_version_history + ########## + # keep this above generic iOS, since AppleTV UAs contain 'CPU OS' + - regex: '(Apple\s?TV)(?:/(\d+)\.(\d+))?' + os_replacement: 'ATV OS X' + + - regex: '(CPU OS|iPhone OS|CPU iPhone) +(\d+)[_\.](\d+)(?:[_\.](\d+))?'
+ os_replacement: 'iOS' + + # remaining cases are mostly only opera uas, so catch opera as to not catch iphone spoofs + - regex: '(iPhone|iPad|iPod); Opera' + os_replacement: 'iOS' + + # few more stragglers + - regex: '(iPhone|iPad|iPod).*Mac OS X.*Version/(\d+)\.(\d+)' + os_replacement: 'iOS' + + # CFNetwork/Darwin - The specific CFNetwork or Darwin version determines + # whether the os maps to Mac OS, or iOS, or just Darwin. + # See: http://user-agents.me/cfnetwork-version-list + - regex: '(CFNetwork)/(5)48\.0\.3.* Darwin/11\.0\.0' + os_replacement: 'iOS' + - regex: '(CFNetwork)/(5)48\.(0)\.4.* Darwin/(1)1\.0\.0' + os_replacement: 'iOS' + - regex: '(CFNetwork)/(5)48\.(1)\.4' + os_replacement: 'iOS' + - regex: '(CFNetwork)/(4)85\.1(3)\.9' + os_replacement: 'iOS' + - regex: '(CFNetwork)/(6)09\.(1)\.4' + os_replacement: 'iOS' + - regex: '(CFNetwork)/(6)(0)9' + os_replacement: 'iOS' + - regex: '(CFNetwork)/6(7)2\.(1)\.13' + os_replacement: 'iOS' + - regex: '(CFNetwork)/6(7)2\.(1)\.(1)4' + os_replacement: 'iOS' + - regex: '(CF)(Network)/6(7)(2)\.1\.15' + os_replacement: 'iOS' + os_v1_replacement: '7' + os_v2_replacement: '1' + - regex: '(CFNetwork)/6(7)2\.(0)\.(?:2|8)' + os_replacement: 'iOS' + - regex: '(CFNetwork)/709\.1' + os_replacement: 'iOS' + os_v1_replacement: '8' + os_v2_replacement: '0.b5' + - regex: '(CF)(Network)/711\.(\d)' + os_replacement: 'iOS' + os_v1_replacement: '8' + - regex: '(CF)(Network)/(720)\.(\d)' + os_replacement: 'Mac OS X' + os_v1_replacement: '10' + os_v2_replacement: '10' + - regex: '(CF)(Network)/758\.(\d)' + os_replacement: 'iOS' + os_v1_replacement: '9' + + ########## + # CFNetwork iOS Apps + # @ref: https://en.wikipedia.org/wiki/Darwin_(operating_system)#Release_history + ########## + - regex: 'CFNetwork/.* Darwin/(9)\.\d+' + os_replacement: 'iOS' + os_v1_replacement: '1' + - regex: 'CFNetwork/.* Darwin/(10)\.\d+' + os_replacement: 'iOS' + os_v1_replacement: '4' + - regex: 'CFNetwork/.* Darwin/(11)\.\d+' + os_replacement: 'iOS' + os_v1_replacement: '5' + - regex: 'CFNetwork/.* Darwin/(13)\.\d+' + os_replacement: 'iOS' + os_v1_replacement: '6' + - regex: 'CFNetwork/6.* Darwin/(14)\.\d+' + os_replacement: 'iOS' + os_v1_replacement: '7' + - regex: 'CFNetwork/7.* Darwin/(14)\.\d+' + os_replacement: 'iOS' + os_v1_replacement: '8' + os_v2_replacement: '0' + - regex: 'CFNetwork/7.* Darwin/(15)\.\d+' + os_replacement: 'iOS' + os_v1_replacement: '9' + os_v2_replacement: '0' + # iOS Apps + - regex: '\b(iOS[ /]|iPhone(?:/| v|[ _]OS[/,]|; | OS : |\d,\d/|\d,\d; )|iPad/)(\d{1,2})[_\.](\d{1,2})(?:[_\.](\d+))?' + os_replacement: 'iOS' + + ########## + # Apple TV + ########## + - regex: '(tvOS)/(\d+).(\d+)' + os_replacement: 'tvOS' + + ########## + # Chrome OS + # if version 0.0.0, probably this stuff: + # http://code.google.com/p/chromium-os/issues/detail?id=11573 + # http://code.google.com/p/chromium-os/issues/detail?id=13790 + ########## + - regex: '(CrOS) [a-z0-9_]+ (\d+)\.(\d+)(?:\.(\d+))?' + os_replacement: 'Chrome OS' + + ########## + # Linux distros + ########## + - regex: '([Dd]ebian)' + os_replacement: 'Debian' + - regex: '(Linux Mint)(?:/(\d+))?' + - regex: '(Mandriva)(?: Linux)?/(?:[\d.-]+m[a-z]{2}(\d+).(\d))?' 
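+ # NOTE (illustrative example added as a reading aid; not part of the original file): when an entry has no os_replacement, capture group 1 becomes the OS family and groups 2-4 become the major/minor/patch versions, so a UA containing 'Linux Mint/12' would parse as family 'Linux Mint' with major version 12 via the Linux Mint entry above, while entries like Debian override the family and take no version at all.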
+ + ########## + # Symbian + Symbian OS + # http://en.wikipedia.org/wiki/History_of_Symbian + ########## + - regex: '(Symbian[Oo][Ss])[/ ](\d+)\.(\d+)' + os_replacement: 'Symbian OS' + - regex: '(Symbian/3).+NokiaBrowser/7\.3' + os_replacement: 'Symbian^3 Anna' + - regex: '(Symbian/3).+NokiaBrowser/7\.4' + os_replacement: 'Symbian^3 Belle' + - regex: '(Symbian/3)' + os_replacement: 'Symbian^3' + - regex: '\b(Series 60|SymbOS|S60Version|S60V\d|S60\b)' + os_replacement: 'Symbian OS' + - regex: '(MeeGo)' + - regex: 'Symbian [Oo][Ss]' + os_replacement: 'Symbian OS' + - regex: 'Series40;' + os_replacement: 'Nokia Series 40' + - regex: 'Series30Plus;' + os_replacement: 'Nokia Series 30 Plus' + + ########## + # BlackBerry devices + ########## + - regex: '(BB10);.+Version/(\d+)\.(\d+)\.(\d+)' + os_replacement: 'BlackBerry OS' + - regex: '(Black[Bb]erry)[0-9a-z]+/(\d+)\.(\d+)\.(\d+)(?:\.(\d+))?' + os_replacement: 'BlackBerry OS' + - regex: '(Black[Bb]erry).+Version/(\d+)\.(\d+)\.(\d+)(?:\.(\d+))?' + os_replacement: 'BlackBerry OS' + - regex: '(RIM Tablet OS) (\d+)\.(\d+)\.(\d+)' + os_replacement: 'BlackBerry Tablet OS' + - regex: '(Play[Bb]ook)' + os_replacement: 'BlackBerry Tablet OS' + - regex: '(Black[Bb]erry)' + os_replacement: 'BlackBerry OS' + + ########## + # Firefox OS + ########## + - regex: '\((?:Mobile|Tablet);.+Gecko/18.0 Firefox/\d+\.\d+' + os_replacement: 'Firefox OS' + os_v1_replacement: '1' + os_v2_replacement: '0' + os_v3_replacement: '1' + + - regex: '\((?:Mobile|Tablet);.+Gecko/18.1 Firefox/\d+\.\d+' + os_replacement: 'Firefox OS' + os_v1_replacement: '1' + os_v2_replacement: '1' + + - regex: '\((?:Mobile|Tablet);.+Gecko/26.0 Firefox/\d+\.\d+' + os_replacement: 'Firefox OS' + os_v1_replacement: '1' + os_v2_replacement: '2' + + - regex: '\((?:Mobile|Tablet);.+Gecko/28.0 Firefox/\d+\.\d+' + os_replacement: 'Firefox OS' + os_v1_replacement: '1' + os_v2_replacement: '3' + + - regex: '\((?:Mobile|Tablet);.+Gecko/30.0 Firefox/\d+\.\d+' + os_replacement: 'Firefox OS' + os_v1_replacement: '1' + os_v2_replacement: '4' + + - regex: '\((?:Mobile|Tablet);.+Gecko/32.0 Firefox/\d+\.\d+' + os_replacement: 'Firefox OS' + os_v1_replacement: '2' + os_v2_replacement: '0' + + - regex: '\((?:Mobile|Tablet);.+Gecko/34.0 Firefox/\d+\.\d+' + os_replacement: 'Firefox OS' + os_v1_replacement: '2' + os_v2_replacement: '1' + + # Firefox OS Generic + - regex: '\((?:Mobile|Tablet);.+Firefox/\d+\.\d+' + os_replacement: 'Firefox OS' + + + ########## + # BREW + # yes, Brew is lower-cased for Brew MP + ########## + - regex: '(BREW)[ /](\d+)\.(\d+)\.(\d+)' + - regex: '(BREW);' + - regex: '(Brew MP|BMP)[ /](\d+)\.(\d+)\.(\d+)' + os_replacement: 'Brew MP' + - regex: 'BMP;' + os_replacement: 'Brew MP' + + ########## + # Google TV + ########## + - regex: '(GoogleTV)(?: (\d+)\.(\d+)(?:\.(\d+))?|/[\da-z]+)' + + - regex: '(WebTV)/(\d+).(\d+)' + + ########## + # Misc mobile + ########## + - regex: '(hpw|web)OS/(\d+)\.(\d+)(?:\.(\d+))?' + os_replacement: 'webOS' + - regex: '(VRE);' + + ########## + # Generic patterns + # since the majority of os cases are very specific, these go last + ########## + - regex: '(Fedora|Red Hat|PCLinuxOS|Puppy|Ubuntu|Kindle|Bada|Lubuntu|BackTrack|Slackware|(?:Free|Open|Net|\b)BSD)[/ ](\d+)\.(\d+)(?:\.(\d+)(?:\.(\d+))?)?' 
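+ # Illustrative example (not part of the original file): a UA containing 'Ubuntu/10.04' is caught by the generic pattern above as family 'Ubuntu', version 10.04; strings that miss here fall through to the bare OS-name and Linux kernel entries further below.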
+ + # Gentoo Linux + Kernel Version + - regex: '(Linux)[ /](\d+)\.(\d+)(?:\.(\d+))?.*gentoo' + os_replacement: 'Gentoo' + + # Opera Mini Bada + - regex: '\((Bada);' + + # just os + - regex: '(Windows|Android|WeTab|Maemo)' + - regex: '(Ubuntu|Kubuntu|Arch Linux|CentOS|Slackware|Gentoo|openSUSE|SUSE|Red Hat|Fedora|PCLinuxOS|Mageia|(?:Free|Open|Net|\b)BSD)' + # Linux + Kernel Version + - regex: '(Linux)(?:[ /](\d+)\.(\d+)(?:\.(\d+))?)?' + - regex: 'SunOS' + os_replacement: 'Solaris' + + # Roku Digital-Video-Players https://www.roku.com/ + - regex: '^(Roku)/DVP-(\d+)\.(\d+)' + +device_parsers: + + ######### + # Mobile Spiders + # Catch the mobile crawler before checking for iPhones / Androids. + ######### + - regex: '(?:(?:iPhone|Windows CE|Android).*(?:(?:Bot|Yeti)-Mobile|YRSpider|bots?/\d|(?:bot|spider)\.html)|AdsBot-Google-Mobile.*iPhone)' + regex_flag: 'i' + device_replacement: 'Spider' + brand_replacement: 'Spider' + model_replacement: 'Smartphone' + - regex: '(?:DoCoMo|\bMOT\b|\bLG\b|Nokia|Samsung|SonyEricsson).*(?:(?:Bot|Yeti)-Mobile|bots?/\d|(?:bot|crawler)\.html|(?:jump|google|Wukong)bot|ichiro/mobile|/spider|YahooSeeker)' + regex_flag: 'i' + device_replacement: 'Spider' + brand_replacement: 'Spider' + model_replacement: 'Feature Phone' + + ######### + # WebBrowser for SmartWatch + # @ref: https://play.google.com/store/apps/details?id=se.vaggan.webbrowser&hl=en + ######### + - regex: '\bSmartWatch *\( *([^;]+) *; *([^;]+) *;' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + + ###################################################################### + # Android parsers + # + # @ref: https://support.google.com/googleplay/answer/1727131?hl=en + ###################################################################### + + # Android Application + - regex: 'Android Application[^\-]+ - (Sony) ?(Ericsson)? 
(.+) \w+ - ' + device_replacement: '$1 $2' + brand_replacement: '$1$2' + model_replacement: '$3' + - regex: 'Android Application[^\-]+ - (?:HTC|HUAWEI|LGE|LENOVO|MEDION|TCT) (HTC|HUAWEI|LG|LENOVO|MEDION|ALCATEL)[ _\-](.+) \w+ - ' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + - regex: 'Android Application[^\-]+ - ([^ ]+) (.+) \w+ - ' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + + ######### + # 3Q + # @ref: http://www.3q-int.com/ + ######### + - regex: '; *([BLRQ]C\d{4}[A-Z]+) +Build/' + device_replacement: '3Q $1' + brand_replacement: '3Q' + model_replacement: '$1' + - regex: '; *(?:3Q_)([^;/]+) +Build' + device_replacement: '3Q $1' + brand_replacement: '3Q' + model_replacement: '$1' + + ######### + # Acer + # @ref: http://us.acer.com/ac/en/US/content/group/tablets + ######### + - regex: 'Android [34].*; *(A100|A101|A110|A200|A210|A211|A500|A501|A510|A511|A700(?: Lite| 3G)?|A701|B1-A71|A1-\d{3}|B1-\d{3}|V360|V370|W500|W500P|W501|W501P|W510|W511|W700|Slider SL101|DA22[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Acer' + model_replacement: '$1' + - regex: '; *Acer Iconia Tab ([^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Acer' + model_replacement: '$1' + - regex: '; *(Z1[1235]0|E320[^/]*|S500|S510|Liquid[^;/]*|Iconia A\d+) Build' + device_replacement: '$1' + brand_replacement: 'Acer' + model_replacement: '$1' + - regex: '; *(Acer |ACER )([^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Acer' + model_replacement: '$2' + + ######### + # Advent + # @ref: https://en.wikipedia.org/wiki/Advent_Vega + # @note: VegaBean and VegaComb (names derived from jellybean, honeycomb) are + # custom ROM builds for Vega + ######### + - regex: '; *(Advent )?(Vega(?:Bean|Comb)?).* Build' + device_replacement: '$1$2' + brand_replacement: 'Advent' + model_replacement: '$2' + + ######### + # Ainol + # @ref: http://www.ainol.com/plugin.php?identifier=ainol&module=product + ######### + - regex: '; *(Ainol )?((?:NOVO|[Nn]ovo)[^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Ainol' + model_replacement: '$2' + + ######### + # Airis + # @ref: http://airis.es/Tienda/Default.aspx?idG=001 + ######### + - regex: '; *AIRIS[ _\-]?([^/;\)]+) *(?:;|\)|Build)' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'Airis' + model_replacement: '$1' + - regex: '; *(OnePAD[^;/]+) Build' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'Airis' + model_replacement: '$1' + + ######### + # Airpad + # @ref: ?? + ######### + - regex: '; *Airpad[ \-]([^;/]+) Build' + device_replacement: 'Airpad $1' + brand_replacement: 'Airpad' + model_replacement: '$1' + + ######### + # Alcatel - TCT + # @ref: http://www.alcatelonetouch.com/global-en/products/smartphones.html + ######### + - regex: '; *(one ?touch) (EVO7|T10|T20) Build' + device_replacement: 'Alcatel One Touch $2' + brand_replacement: 'Alcatel' + model_replacement: 'One Touch $2' + - regex: '; *(?:alcatel[ _])?(?:(?:one[ _]?touch[ _])|ot[ \-])([^;/]+);? 
Build' + regex_flag: 'i' + device_replacement: 'Alcatel One Touch $1' + brand_replacement: 'Alcatel' + model_replacement: 'One Touch $1' + - regex: '; *(TCL)[ _]([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + # operator specific models + - regex: '; *(Vodafone Smart II|Optimus_Madrid) Build' + device_replacement: 'Alcatel $1' + brand_replacement: 'Alcatel' + model_replacement: '$1' + - regex: '; *BASE_Lutea_3 Build' + device_replacement: 'Alcatel One Touch 998' + brand_replacement: 'Alcatel' + model_replacement: 'One Touch 998' + - regex: '; *BASE_Varia Build' + device_replacement: 'Alcatel One Touch 918D' + brand_replacement: 'Alcatel' + model_replacement: 'One Touch 918D' + + ######### + # Allfine + # @ref: http://www.myallfine.com/Products.asp + ######### + - regex: '; *((?:FINE|Fine)\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Allfine' + model_replacement: '$1' + + ######### + # Allview + # @ref: http://www.allview.ro/produse/droseries/lista-tablete-pc/ + ######### + - regex: '; *(ALLVIEW[ _]?|Allview[ _]?)((?:Speed|SPEED).*) Build/' + device_replacement: '$1$2' + brand_replacement: 'Allview' + model_replacement: '$2' + - regex: '; *(ALLVIEW[ _]?|Allview[ _]?)?(AX1_Shine|AX2_Frenzy) Build' + device_replacement: '$1$2' + brand_replacement: 'Allview' + model_replacement: '$2' + - regex: '; *(ALLVIEW[ _]?|Allview[ _]?)([^;/]*) Build' + device_replacement: '$1$2' + brand_replacement: 'Allview' + model_replacement: '$2' + + ######### + # Allwinner + # @ref: http://www.allwinner.com/ + # @models: A31 (13.3"),A20,A10, + ######### + - regex: '; *(A13-MID) Build' + device_replacement: '$1' + brand_replacement: 'Allwinner' + model_replacement: '$1' + - regex: '; *(Allwinner)[ _\-]?([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'Allwinner' + model_replacement: '$1' + + ######### + # Amaway + # @ref: http://www.amaway.cn/ + ######### + - regex: '; *(A651|A701B?|A702|A703|A705|A706|A707|A711|A712|A713|A717|A722|A785|A801|A802|A803|A901|A902|A1002|A1003|A1006|A1007|A9701|A9703|Q710|Q80) Build' + device_replacement: '$1' + brand_replacement: 'Amaway' + model_replacement: '$1' + + ######### + # Amoi + # @ref: http://www.amoi.com/en/prd/prd_index.jspx + ######### + - regex: '; *(?:AMOI|Amoi)[ _]([^;/]+) Build' + device_replacement: 'Amoi $1' + brand_replacement: 'Amoi' + model_replacement: '$1' + - regex: '^(?:AMOI|Amoi)[ _]([^;/]+) Linux' + device_replacement: 'Amoi $1' + brand_replacement: 'Amoi' + model_replacement: '$1' + + ######### + # Aoc + # @ref: http://latin.aoc.com/media_tablet + ######### + - regex: '; *(MW(?:0[789]|10)[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Aoc' + model_replacement: '$1' + + ######### + # Aoson + # @ref: http://www.luckystar.com.cn/en/mid.aspx?page=1 + # @ref: http://www.luckystar.com.cn/en/mobiletel.aspx?page=1 + # @note: brand owned by luckystar + ######### + - regex: '; *(G7|M1013|M1015G|M11[CG]?|M-?12[B]?|M15|M19[G]?|M30[ACQ]?|M31[GQ]|M32|M33[GQ]|M36|M37|M38|M701T|M710|M712B|M713|M715G|M716G|M71(?:G|GS|T)?|M72[T]?|M73[T]?|M75[GT]?|M77G|M79T|M7L|M7LN|M81|M810|M81T|M82|M92|M92KS|M92S|M717G|M721|M722G|M723|M725G|M739|M785|M791|M92SK|M93D) Build' + device_replacement: 'Aoson $1' + brand_replacement: 'Aoson' + model_replacement: '$1' + - regex: '; *Aoson ([^;/]+) Build' + regex_flag: 'i' + device_replacement: 'Aoson $1' + brand_replacement: 'Aoson' + model_replacement: '$1' + + ######### + # Apanda + # @ref: http://www.apanda.com.cn/ + ######### + - regex: '; 
*[Aa]panda[ _\-]([^;/]+) Build' + device_replacement: 'Apanda $1' + brand_replacement: 'Apanda' + model_replacement: '$1' + + ######### + # Archos + # @ref: http://www.archos.com/de/products/tablets.html + # @ref: http://www.archos.com/de/products/smartphones/index.html + ######### + - regex: '; *(?:ARCHOS|Archos) ?(GAMEPAD.*?)(?: Build|[;/\(\)\-])' + device_replacement: 'Archos $1' + brand_replacement: 'Archos' + model_replacement: '$1' + - regex: 'ARCHOS; GOGI; ([^;]+);' + device_replacement: 'Archos $1' + brand_replacement: 'Archos' + model_replacement: '$1' + - regex: '(?:ARCHOS|Archos)[ _]?(.*?)(?: Build|[;/\(\)\-]|$)' + device_replacement: 'Archos $1' + brand_replacement: 'Archos' + model_replacement: '$1' + - regex: '; *(AN(?:7|8|9|10|13)[A-Z0-9]{1,4}) Build' + device_replacement: 'Archos $1' + brand_replacement: 'Archos' + model_replacement: '$1' + - regex: '; *(A28|A32|A43|A70(?:BHT|CHT|HB|S|X)|A101(?:B|C|IT)|A7EB|A7EB-WK|101G9|80G9) Build' + device_replacement: 'Archos $1' + brand_replacement: 'Archos' + model_replacement: '$1' + + ######### + # A-rival + # @ref: http://www.a-rival.de/de/ + ######### + - regex: '; *(PAD-FMD[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Arival' + model_replacement: '$1' + - regex: '; *(BioniQ) ?([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'Arival' + model_replacement: '$1 $2' + + ######### + # Arnova + # @ref: http://arnovatech.com/ + ######### + - regex: '; *(AN\d[^;/]+|ARCHM\d+) Build' + device_replacement: 'Arnova $1' + brand_replacement: 'Arnova' + model_replacement: '$1' + - regex: '; *(?:ARNOVA|Arnova) ?([^;/]+) Build' + device_replacement: 'Arnova $1' + brand_replacement: 'Arnova' + model_replacement: '$1' + + ######### + # Assistant + # @ref: http://www.assistant.ua + ######### + - regex: '; *(?:ASSISTANT )?(AP)-?([1789]\d{2}[A-Z]{0,2}|80104) Build' + device_replacement: 'Assistant $1-$2' + brand_replacement: 'Assistant' + model_replacement: '$1-$2' + + ######### + # Asus + # @ref: http://www.asus.com/uk/Tablets_Mobile/ + ######### + - regex: '; *(ME17\d[^;/]*|ME3\d{2}[^;/]+|K00[A-Z]|Nexus 10|Nexus 7(?: 2013)?|PadFone[^;/]*|Transformer[^;/]*|TF\d{3}[^;/]*|eeepc) Build' + device_replacement: 'Asus $1' + brand_replacement: 'Asus' + model_replacement: '$1' + - regex: '; *ASUS[ _]*([^;/]+) Build' + device_replacement: 'Asus $1' + brand_replacement: 'Asus' + model_replacement: '$1' + + ######### + # Garmin-Asus + ######### + - regex: '; *Garmin-Asus ([^;/]+) Build' + device_replacement: 'Garmin-Asus $1' + brand_replacement: 'Garmin-Asus' + model_replacement: '$1' + - regex: '; *(Garminfone) Build' + device_replacement: 'Garmin $1' + brand_replacement: 'Garmin-Asus' + model_replacement: '$1' + + ######### + # Attab + # @ref: http://www.theattab.com/ + ######### + - regex: '; (@TAB-[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Attab' + model_replacement: '$1' + + ######### + # Audiosonic + # @ref: ?? 
+ # @note: Take care with Docomo T-01 Toshiba + ######### + - regex: '; *(T-(?:07|[^0]\d)[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Audiosonic' + model_replacement: '$1' + + ######### + # Axioo + # @ref: http://www.axiooworld.com/ww/index.php + ######### + - regex: '; *(?:Axioo[ _\-]([^;/]+)|(picopad)[ _\-]([^;/]+)) Build' + regex_flag: 'i' + device_replacement: 'Axioo $1$2 $3' + brand_replacement: 'Axioo' + model_replacement: '$1$2 $3' + + ######### + # Azend + # @ref: http://azendcorp.com/index.php/products/portable-electronics + ######### + - regex: '; *(V(?:100|700|800)[^;/]*) Build' + device_replacement: '$1' + brand_replacement: 'Azend' + model_replacement: '$1' + + ######### + # Bak + # @ref: http://www.bakinternational.com/produtos.php?cat=80 + ######### + - regex: '; *(IBAK\-[^;/]*) Build' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'Bak' + model_replacement: '$1' + + ######### + # Bedove + # @ref: http://www.bedove.com/product.html + # @models: HY6501|HY5001|X12|X21|I5 + ######### + - regex: '; *(HY5001|HY6501|X12|X21|I5) Build' + device_replacement: 'Bedove $1' + brand_replacement: 'Bedove' + model_replacement: '$1' + + ######### + # Benss + # @ref: http://www.benss.net/ + ######### + - regex: '; *(JC-[^;/]*) Build' + device_replacement: 'Benss $1' + brand_replacement: 'Benss' + model_replacement: '$1' + + ######### + # Blackberry + # @ref: http://uk.blackberry.com/ + # @note: Android apps seem to be used here + ######### + - regex: '; *(BB) ([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'Blackberry' + model_replacement: '$2' + + ######### + # Blackbird + # @ref: http://iblackbird.co.kr + ######### + - regex: '; *(BlackBird)[ _](I8.*) Build' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + - regex: '; *(BlackBird)[ _](.*) Build' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + + ######### + # Blaupunkt + # @ref: http://www.blaupunkt.com + ######### + # Endeavour + - regex: '; *([0-9]+BP[EM][^;/]*|Endeavour[^;/]+) Build' + device_replacement: 'Blaupunkt $1' + brand_replacement: 'Blaupunkt' + model_replacement: '$1' + + ######### + # Blu + # @ref: http://bluproducts.com + ######### + - regex: '; *((?:BLU|Blu)[ _\-])([^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Blu' + model_replacement: '$2' + # BMOBILE = operator branded device + - regex: '; *(?:BMOBILE )?(Blu|BLU|DASH [^;/]+|VIVO 4\.3|TANK 4\.5) Build' + device_replacement: '$1' + brand_replacement: 'Blu' + model_replacement: '$1' + + ######### + # Blusens + # @ref: http://www.blusens.com/es/?sg=1&sv=al&roc=1 + ######### + # tablet + - regex: '; *(TOUCH\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Blusens' + model_replacement: '$1' + + ######### + # Bmobile + # @ref: http://bmobile.eu.com/?categoria=smartphones-2 + # @note: Might collide with Maxx, as AX is also used there. + ######### + # smartphone + - regex: '; *(AX5\d+) Build' + device_replacement: '$1' + brand_replacement: 'Bmobile' + model_replacement: '$1' + + ######### + # bq + # @ref: http://bqreaders.com + ######### + - regex: '; *([Bb]q) ([^;/]+);? Build'
+ device_replacement: '$1 $2' + brand_replacement: 'bq' + model_replacement: '$2' + - regex: '; *(Maxwell [^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'bq' + model_replacement: '$1' + + ######### + # Braun Phototechnik + # @ref: http://www.braun-phototechnik.de/en/products/list/~pcat.250/Tablet-PC.html + ######### + - regex: '; *((?:B-Tab|B-TAB) ?\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Braun' + model_replacement: '$1' + + ######### + # Broncho + # @ref: http://www.broncho.cn/ + ######### + - regex: '; *(Broncho) ([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + + ######### + # Captiva + # @ref: http://www.captiva-power.de + ######### + - regex: '; *CAPTIVA ([^;/]+) Build' + device_replacement: 'Captiva $1' + brand_replacement: 'Captiva' + model_replacement: '$1' + + ######### + # Casio + # @ref: http://www.casiogzone.com/ + ######### + - regex: '; *(C771|CAL21|IS11CA) Build' + device_replacement: '$1' + brand_replacement: 'Casio' + model_replacement: '$1' + + ######### + # Cat + # @ref: http://www.cat-sound.com + ######### + - regex: '; *(?:Cat|CAT) ([^;/]+) Build' + device_replacement: 'Cat $1' + brand_replacement: 'Cat' + model_replacement: '$1' + - regex: '; *(?:Cat)(Nova.*) Build' + device_replacement: 'Cat $1' + brand_replacement: 'Cat' + model_replacement: '$1' + - regex: '; *(INM8002KP|ADM8000KP_[AB]) Build' + device_replacement: '$1' + brand_replacement: 'Cat' + model_replacement: 'Tablet PHOENIX 8.1J0' + + ######### + # Celkon + # @ref: http://www.celkonmobiles.com/?_a=products + # @models: A10, A19Q, A101, A105, A107, A107\+, A112, A118, A119, A119Q, A15, A19, A20, A200, A220, A225, A22 Race, A27, A58, A59, A60, A62, A63, A64, A66, A67, A69, A75, A77, A79, A8\+, A83, A85, A86, A87, A89 Ultima, A9\+, A90, A900, A95, A97i, A98, AR 40, AR 45, AR 50, ML5 + ######### + - regex: '; *(?:[Cc]elkon[ _\*]|CELKON[ _\*])([^;/\)]+) ?(?:Build|;|\))' + device_replacement: '$1' + brand_replacement: 'Celkon' + model_replacement: '$1' + - regex: 'Build/(?:[Cc]elkon)+_?([^;/_\)]+)' + device_replacement: '$1' + brand_replacement: 'Celkon' + model_replacement: '$1' + - regex: '; *(CT)-?(\d+) Build' + device_replacement: '$1$2' + brand_replacement: 'Celkon' + model_replacement: '$1$2' + # smartphones + - regex: '; *(A19|A19Q|A105|A107[^;/\)]*) ?(?:Build|;|\))' + device_replacement: '$1' + brand_replacement: 'Celkon' + model_replacement: '$1' + + ######### + # ChangJia + # @ref: http://www.cjshowroom.com/eproducts.aspx?classcode=004001001 + # @brief: Chinese manufacturer that makes tablets for various small brands + # (e.g. 
http://www.zeepad.net/index.html) + ######### + - regex: '; *(TPC[0-9]{4,5}) Build' + device_replacement: '$1' + brand_replacement: 'ChangJia' + model_replacement: '$1' + + ######### + # Cloudfone + # @ref: http://www.cloudfonemobile.com/ + ######### + - regex: '; *(Cloudfone)[ _](Excite)([^ ][^;/]+) Build' + device_replacement: '$1 $2 $3' + brand_replacement: 'Cloudfone' + model_replacement: '$1 $2 $3' + - regex: '; *(Excite|ICE)[ _](\d+[^;/]+) Build' + device_replacement: 'Cloudfone $1 $2' + brand_replacement: 'Cloudfone' + model_replacement: 'Cloudfone $1 $2' + - regex: '; *(Cloudfone|CloudPad)[ _]([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'Cloudfone' + model_replacement: '$1 $2' + + ######### + # Cmx + # @ref: http://cmx.at/de/ + ######### + - regex: '; *((?:Aquila|Clanga|Rapax)[^;/]+) Build' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'Cmx' + model_replacement: '$1' + + ######### + # CobyKyros + # @ref: http://cobykyros.com + # @note: Be careful with MID\d{3} from MpMan or Manta + ######### + - regex: '; *(?:CFW-|Kyros )?(MID[0-9]{4}(?:[ABC]|SR|TV)?)(\(3G\)-4G| GB 8K| 3G| 8K| GB)? *(?:Build|[;\)])' + device_replacement: 'CobyKyros $1$2' + brand_replacement: 'CobyKyros' + model_replacement: '$1$2' + + ######### + # Coolpad + # @ref: ?? + ######### + - regex: '; *([^;/]*)Coolpad[ _]([^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Coolpad' + model_replacement: '$1$2' + + ######### + # Cube + # @ref: http://www.cube-tablet.com/buy-products.html + ######### + - regex: '; *(CUBE[ _])?([KU][0-9]+ ?GT.*|A5300) Build' + regex_flag: 'i' + device_replacement: '$1$2' + brand_replacement: 'Cube' + model_replacement: '$2' + + ######### + # Cubot + # @ref: http://www.cubotmall.com/ + ######### + - regex: '; *CUBOT ([^;/]+) Build' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'Cubot' + model_replacement: '$1' + - regex: '; *(BOBBY) Build' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'Cubot' + model_replacement: '$1' + + ######### + # Danew + # @ref: http://www.danew.com/produits-tablette.php + ######### + - regex: '; *(Dslide [^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Danew' + model_replacement: '$1' + + ######### + # Dell + # @ref: http://www.dell.com + # @ref: http://www.softbank.jp/mobile/support/product/101dl/ + # @ref: http://www.softbank.jp/mobile/support/product/001dl/ + # @ref: http://developer.emnet.ne.jp/android.html + # @ref: http://www.dell.com/in/p/mobile-xcd28/pd + # @ref: http://www.dell.com/in/p/mobile-xcd35/pd + ######### + - regex: '; *(XCD)[ _]?(28|35) Build' + device_replacement: 'Dell $1$2' + brand_replacement: 'Dell' + model_replacement: '$1$2' + - regex: '; *(001DL) Build' + device_replacement: 'Dell $1' + brand_replacement: 'Dell' + model_replacement: 'Streak' + - regex: '; *(?:Dell|DELL) (Streak) Build' + device_replacement: 'Dell $1' + brand_replacement: 'Dell' + model_replacement: 'Streak' + - regex: '; *(101DL|GS01|Streak Pro[^;/]*) Build' + device_replacement: 'Dell $1' + brand_replacement: 'Dell' + model_replacement: 'Streak Pro' + - regex: '; *([Ss]treak ?7) Build' + device_replacement: 'Dell $1' + brand_replacement: 'Dell' + model_replacement: 'Streak 7' + - regex: '; *(Mini-3iX) Build' + device_replacement: 'Dell $1' + brand_replacement: 'Dell' + model_replacement: '$1' + - regex: '; *(?:Dell|DELL)[ _](Aero|Venue|Thunder|Mini.*|Streak[ _]Pro) Build' + device_replacement: 'Dell $1' + brand_replacement: 'Dell' + model_replacement: '$1' + - regex: 
'; *Dell[ _]([^;/]+) Build' + device_replacement: 'Dell $1' + brand_replacement: 'Dell' + model_replacement: '$1' + - regex: '; *Dell ([^;/]+) Build' + device_replacement: 'Dell $1' + brand_replacement: 'Dell' + model_replacement: '$1' + + ######### + # Denver + # @ref: http://www.denver-electronics.com/tablets1/ + ######### + - regex: '; *(TA[CD]-\d+[^;/]*) Build' + device_replacement: '$1' + brand_replacement: 'Denver' + model_replacement: '$1' + + ######### + # Dex + # @ref: http://dex.ua/ + ######### + - regex: '; *(iP[789]\d{2}(?:-3G)?|IP10\d{2}(?:-8GB)?) Build' + device_replacement: '$1' + brand_replacement: 'Dex' + model_replacement: '$1' + + ######### + # DNS AirTab + # @ref: http://www.dns-shop.ru/ + ######### + - regex: '; *(AirTab)[ _\-]([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'DNS' + model_replacement: '$1 $2' + + ######### + # Docomo (Operator Branded Device) + # @ref: http://www.ipentec.com/document/document.aspx?page=android-useragent + ######### + - regex: '; *(F\-\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Fujitsu' + model_replacement: '$1' + - regex: '; *(HT-03A) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: 'Magic' + - regex: '; *(HT\-\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: '$1' + - regex: '; *(L\-\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'LG' + model_replacement: '$1' + - regex: '; *(N\-\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Nec' + model_replacement: '$1' + - regex: '; *(P\-\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Panasonic' + model_replacement: '$1' + - regex: '; *(SC\-\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Samsung' + model_replacement: '$1' + - regex: '; *(SH\-\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Sharp' + model_replacement: '$1' + - regex: '; *(SO\-\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'SonyEricsson' + model_replacement: '$1' + - regex: '; *(T\-0[12][^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Toshiba' + model_replacement: '$1' + + ######### + # DOOV + # @ref: http://www.doov.com.cn/ + ######### + - regex: '; *(DOOV)[ _]([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'DOOV' + model_replacement: '$2' + + ######### + # Enot + # @ref: http://www.enot.ua/ + ######### + - regex: '; *(Enot|ENOT)[ -]?([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'Enot' + model_replacement: '$2' + + ######### + # Evercoss + # @ref: http://evercoss.com/android/ + ######### + - regex: '; *[^;/]+ Build/(?:CROSS|Cross)+[ _\-]([^\)]+)' + device_replacement: 'CROSS $1' + brand_replacement: 'Evercoss' + model_replacement: 'Cross $1' + - regex: '; *(CROSS|Cross)[ _\-]([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'Evercoss' + model_replacement: 'Cross $2' + + ######### + # Explay + # @ref: http://explay.ru/ + ######### + - regex: '; *Explay[_ ](.+?)(?:[\)]| Build)' + device_replacement: '$1' + brand_replacement: 'Explay' + model_replacement: '$1' + + ######### + # Fly + # @ref: http://www.fly-phone.com/ + ######### + - regex: '; *(IQ.*) Build' + device_replacement: '$1' + brand_replacement: 'Fly' + model_replacement: '$1' + - regex: '; *(Fly|FLY)[ _](IQ[^;]+|F[34]\d+[^;]*);? 
Build' + device_replacement: '$1 $2' + brand_replacement: 'Fly' + model_replacement: '$2' + + ######### + # Fujitsu + # @ref: http://www.fujitsu.com/global/ + ######### + - regex: '; *(M532|Q572|FJL21) Build/' + device_replacement: '$1' + brand_replacement: 'Fujitsu' + model_replacement: '$1' + + ######### + # Galapad + # @ref: http://www.galapad.net/product.html + ######### + - regex: '; *(G1) Build' + device_replacement: '$1' + brand_replacement: 'Galapad' + model_replacement: '$1' + + ######### + # Geeksphone + # @ref: http://www.geeksphone.com/ + ######### + - regex: '; *(Geeksphone) ([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + + ######### + # Gfive + # @ref: http://www.gfivemobile.com/en + ######### + #- regex: '; *(G\'?FIVE) ([^;/]+) Build' # there is a problem with python yaml parser here + - regex: '; *(G[^F]?FIVE) ([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'Gfive' + model_replacement: '$2' + + ######### + # Gionee + # @ref: http://www.gionee.com/ + ######### + - regex: '; *(Gionee)[ _\-]([^;/]+)(?:/[^;/]+)? Build' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'Gionee' + model_replacement: '$2' + - regex: '; *(GN\d+[A-Z]?|INFINITY_PASSION|Ctrl_V1) Build' + device_replacement: 'Gionee $1' + brand_replacement: 'Gionee' + model_replacement: '$1' + - regex: '; *(E3) Build/JOP40D' + device_replacement: 'Gionee $1' + brand_replacement: 'Gionee' + model_replacement: '$1' + + ######### + # GoClever + # @ref: http://www.goclever.com + ######### + - regex: '; *((?:FONE|QUANTUM|INSIGNIA) \d+[^;/]*|PLAYTAB) Build' + device_replacement: 'GoClever $1' + brand_replacement: 'GoClever' + model_replacement: '$1' + - regex: '; *GOCLEVER ([^;/]+) Build' + device_replacement: 'GoClever $1' + brand_replacement: 'GoClever' + model_replacement: '$1' + + ######### + # Google + # @ref: http://www.google.de/glass/start/ + ######### + - regex: '; *(Glass \d+) Build' + device_replacement: '$1' + brand_replacement: 'Google' + model_replacement: '$1' + + ######### + # Gigabyte + # @ref: http://gsmart.gigabytecm.com/en/ + ######### + - regex: '; *(GSmart)[ -]([^/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'Gigabyte' + model_replacement: '$1 $2' + + ######### + # Freescale development boards + # @ref: http://www.freescale.com/webapp/sps/site/prod_summary.jsp?code=IMX53QSB + ######### + - regex: '; *(imx5[13]_[^/]+) Build' + device_replacement: 'Freescale $1' + brand_replacement: 'Freescale' + model_replacement: '$1' + + ######### + # Haier + # @ref: http://www.haier.com/ + # @ref: http://www.haier.com/de/produkte/tablet/ + ######### + - regex: '; *Haier[ _\-]([^/]+) Build' + device_replacement: 'Haier $1' + brand_replacement: 'Haier' + model_replacement: '$1' + - regex: '; *(PAD1016) Build' + device_replacement: 'Haipad $1' + brand_replacement: 'Haipad' + model_replacement: '$1' + + ######### + # Haipad + # @ref: http://www.haipad.net/ + # @models: V7P|M7SM7S|M9XM9X|M7XM7X|M9|M8|M7-M|M1002|M7|M701 + ######### + - regex: '; *(M701|M7|M8|M9) Build' + device_replacement: 'Haipad $1' + brand_replacement: 'Haipad' + model_replacement: '$1' + + ######### + # Hannspree + # @ref: http://www.hannspree.eu/ + # @models: SN10T1|SN10T2|SN70T31B|SN70T32W + ######### + - regex: '; *(SN\d+T[^;\)/]*)(?: Build|[;\)])' + device_replacement: 'Hannspree $1' + brand_replacement: 'Hannspree' + model_replacement: '$1' + + ######### + # HCLme + # @ref: http://www.hclmetablet.com/india/ + ######### + - regex: 'Build/HCL 
ME Tablet ([^;\)]+)[\);]' + device_replacement: 'HCLme $1' + brand_replacement: 'HCLme' + model_replacement: '$1' + - regex: '; *([^;\/]+) Build/HCL' + device_replacement: 'HCLme $1' + brand_replacement: 'HCLme' + model_replacement: '$1' + + ######### + # Hena + # @ref: http://www.henadigital.com/en/product/index.asp?id=6 + ######### + - regex: '; *(MID-?\d{4}C[EM]) Build' + device_replacement: 'Hena $1' + brand_replacement: 'Hena' + model_replacement: '$1' + + ######### + # Hisense + # @ref: http://www.hisense.com/ + ######### + - regex: '; *(EG\d{2,}|HS-[^;/]+|MIRA[^;/]+) Build' + device_replacement: 'Hisense $1' + brand_replacement: 'Hisense' + model_replacement: '$1' + - regex: '; *(andromax[^;/]+) Build' + regex_flag: 'i' + device_replacement: 'Hisense $1' + brand_replacement: 'Hisense' + model_replacement: '$1' + + ######### + # hitech + # @ref: http://www.hitech-mobiles.com/ + ######### + - regex: '; *(?:AMAZE[ _](S\d+)|(S\d+)[ _]AMAZE) Build' + device_replacement: 'AMAZE $1$2' + brand_replacement: 'hitech' + model_replacement: 'AMAZE $1$2' + + ######### + # HP + # @ref: http://www.hp.com/ + ######### + - regex: '; *(PlayBook) Build' + device_replacement: 'HP $1' + brand_replacement: 'HP' + model_replacement: '$1' + - regex: '; *HP ([^/]+) Build' + device_replacement: 'HP $1' + brand_replacement: 'HP' + model_replacement: '$1' + - regex: '; *([^/]+_tenderloin) Build' + device_replacement: 'HP TouchPad' + brand_replacement: 'HP' + model_replacement: 'TouchPad' + + ######### + # Huawei + # @ref: http://www.huaweidevice.com + # @note: Needs to be before HTC due to Desire HD Build on U8815 + ######### + - regex: '; *(HUAWEI |Huawei-)?([UY][^;/]+) Build/(?:Huawei|HUAWEI)([UY][^\);]+)\)' + device_replacement: '$1$2' + brand_replacement: 'Huawei' + model_replacement: '$2' + - regex: '; *([^;/]+) Build[/ ]Huawei(MT1-U06|[A-Z]+\d+[^\);]+)[^\);]*\)' + device_replacement: '$1' + brand_replacement: 'Huawei' + model_replacement: '$2' + - regex: '; *(S7|M860) Build' + device_replacement: '$1' + brand_replacement: 'Huawei' + model_replacement: '$1' + - regex: '; *((?:HUAWEI|Huawei)[ \-]?)(MediaPad) Build' + device_replacement: '$1$2' + brand_replacement: 'Huawei' + model_replacement: '$2' + - regex: '; *((?:HUAWEI[ _]?|Huawei[ _])?Ascend[ _])([^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Huawei' + model_replacement: '$2' + - regex: '; *((?:HUAWEI|Huawei)[ _\-]?)((?:G700-|MT-)[^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Huawei' + model_replacement: '$2' + - regex: '; *((?:HUAWEI|Huawei)[ _\-]?)([^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Huawei' + model_replacement: '$2' + - regex: '; *(MediaPad[^;]+|SpringBoard) Build/Huawei' + device_replacement: '$1' + brand_replacement: 'Huawei' + model_replacement: '$1' + - regex: '; *([^;]+) Build/Huawei' + device_replacement: '$1' + brand_replacement: 'Huawei' + model_replacement: '$1' + - regex: '; *([Uu])([89]\d{3}) Build' + device_replacement: '$1$2' + brand_replacement: 'Huawei' + model_replacement: 'U$2' + - regex: '; *(?:Ideos |IDEOS )(S7) Build' + device_replacement: 'Huawei Ideos$1' + brand_replacement: 'Huawei' + model_replacement: 'Ideos$1' + - regex: '; *(?:Ideos |IDEOS )([^;/]+\s*|\s*)Build' + device_replacement: 'Huawei Ideos$1' + brand_replacement: 'Huawei' + model_replacement: 'Ideos$1' + - regex: '; *(Orange Daytona|Pulse|Pulse Mini|Vodafone 858|C8500|C8600|C8650|C8660|Nexus 6P) Build' + device_replacement: 'Huawei $1' + brand_replacement: 'Huawei' + model_replacement: '$1' + + 
######### + # HTC + # @ref: http://www.htc.com/www/products/ + # @ref: http://en.wikipedia.org/wiki/List_of_HTC_phones + ######### + + - regex: '; *HTC[ _]([^;]+); Windows Phone' + device_replacement: 'HTC $1' + brand_replacement: 'HTC' + model_replacement: '$1' + + # Android HTC with Version Number matcher + # ; HTC_0P3Z11/1.12.161.3 Build + # ;HTC_A3335 V2.38.841.1 Build + - regex: '; *(?:HTC[ _/])+([^ _/]+)(?:[/\\]1\.0 | V|/| +)\d+\.\d[\d\.]*(?: *Build|\))' + device_replacement: 'HTC $1' + brand_replacement: 'HTC' + model_replacement: '$1' + - regex: '; *(?:HTC[ _/])+([^ _/]+)(?:[ _/]([^ _/]+))?(?:[/\\]1\.0 | V|/| +)\d+\.\d[\d\.]*(?: *Build|\))' + device_replacement: 'HTC $1 $2' + brand_replacement: 'HTC' + model_replacement: '$1 $2' + - regex: '; *(?:HTC[ _/])+([^ _/]+)(?:[ _/]([^ _/]+)(?:[ _/]([^ _/]+))?)?(?:[/\\]1\.0 | V|/| +)\d+\.\d[\d\.]*(?: *Build|\))' + device_replacement: 'HTC $1 $2 $3' + brand_replacement: 'HTC' + model_replacement: '$1 $2 $3' + - regex: '; *(?:HTC[ _/])+([^ _/]+)(?:[ _/]([^ _/]+)(?:[ _/]([^ _/]+)(?:[ _/]([^ _/]+))?)?)?(?:[/\\]1\.0 | V|/| +)\d+\.\d[\d\.]*(?: *Build|\))' + device_replacement: 'HTC $1 $2 $3 $4' + brand_replacement: 'HTC' + model_replacement: '$1 $2 $3 $4' + + # Android HTC without Version Number matcher + - regex: '; *(?:(?:HTC|htc)(?:_blocked)*[ _/])+([^ _/;]+)(?: *Build|[;\)]| - )' + device_replacement: 'HTC $1' + brand_replacement: 'HTC' + model_replacement: '$1' + - regex: '; *(?:(?:HTC|htc)(?:_blocked)*[ _/])+([^ _/]+)(?:[ _/]([^ _/;\)]+))?(?: *Build|[;\)]| - )' + device_replacement: 'HTC $1 $2' + brand_replacement: 'HTC' + model_replacement: '$1 $2' + - regex: '; *(?:(?:HTC|htc)(?:_blocked)*[ _/])+([^ _/]+)(?:[ _/]([^ _/]+)(?:[ _/]([^ _/;\)]+))?)?(?: *Build|[;\)]| - )' + device_replacement: 'HTC $1 $2 $3' + brand_replacement: 'HTC' + model_replacement: '$1 $2 $3' + - regex: '; *(?:(?:HTC|htc)(?:_blocked)*[ _/])+([^ _/]+)(?:[ _/]([^ _/]+)(?:[ _/]([^ _/]+)(?:[ _/]([^ /;]+))?)?)?(?: *Build|[;\)]| - )' + device_replacement: 'HTC $1 $2 $3 $4' + brand_replacement: 'HTC' + model_replacement: '$1 $2 $3 $4' + + # HTC Streaming Player + - regex: 'HTC Streaming Player [^\/]*/[^\/]*/ htc_([^/]+) /' + device_replacement: 'HTC $1' + brand_replacement: 'HTC' + model_replacement: '$1' + # general matcher for anything else + - regex: '(?:[;,] *|^)(?:htccn_chs-)?HTC[ _-]?([^;]+?)(?: *Build|clay|Android|-?Mozilla| Opera| Profile| UNTRUSTED|[;/\(\)]|$)' + regex_flag: 'i' + device_replacement: 'HTC $1' + brand_replacement: 'HTC' + model_replacement: '$1' + # Android matchers without HTC + - regex: '; *(A6277|ADR6200|ADR6300|ADR6350|ADR6400[A-Z]*|ADR6425[A-Z]*|APX515CKT|ARIA|Desire[^_ ]*|Dream|EndeavorU|Eris|Evo|Flyer|HD2|Hero|HERO200|Hero CDMA|HTL21|Incredible|Inspire[A-Z0-9]*|Legend|Liberty|Nexus ?(?:One|HD2)|One|One S C2|One[ _]?(?:S|V|X\+?)\w*|PC36100|PG06100|PG86100|S31HT|Sensation|Wildfire)(?: Build|[/;\(\)])' + regex_flag: 'i' + device_replacement: 'HTC $1' + brand_replacement: 'HTC' + model_replacement: '$1' + - regex: '; *(ADR6200|ADR6400L|ADR6425LVW|Amaze|DesireS?|EndeavorU|Eris|EVO|Evo\d[A-Z]+|HD2|IncredibleS?|Inspire[A-Z0-9]*|Inspire[A-Z0-9]*|Sensation[A-Z0-9]*|Wildfire)[ _-](.+?)(?:[/;\)]|Build|MIUI|1\.0)' + regex_flag: 'i' + device_replacement: 'HTC $1 $2' + brand_replacement: 'HTC' + model_replacement: '$1 $2' + + ######### + # Hyundai + # @ref: http://www.hyundaitechnologies.com + ######### + - regex: '; *HYUNDAI (T\d[^/]*) Build' + device_replacement: 'Hyundai $1' + brand_replacement: 'Hyundai' + model_replacement: '$1' + - regex: '; *HYUNDAI 
([^;/]+) Build' + device_replacement: 'Hyundai $1' + brand_replacement: 'Hyundai' + model_replacement: '$1' + # X900? http://www.amazon.com/Hyundai-X900-Retina-Android-Bluetooth/dp/B00AO07H3O + - regex: '; *(X700|Hold X|MB-6900) Build' + device_replacement: 'Hyundai $1' + brand_replacement: 'Hyundai' + model_replacement: '$1' + + ######### + # iBall + # @ref: http://www.iball.co.in/Category/Mobiles/22 + ######### + - regex: '; *(?:iBall[ _\-])?(Andi)[ _]?(\d[^;/]*) Build' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'iBall' + model_replacement: '$1 $2' + - regex: '; *(IBall)(?:[ _]([^;/]+)|) Build' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'iBall' + model_replacement: '$2' + + ######### + # IconBIT + # @ref: http://www.iconbit.com/catalog/tablets/ + ######### + - regex: '; *(NT-\d+[^ ;/]*|Net[Tt]AB [^;/]+|Mercury [A-Z]+|iconBIT)(?: S/N:[^;/]+)? Build' + device_replacement: '$1' + brand_replacement: 'IconBIT' + model_replacement: '$1' + + ######### + # IMO + # @ref: http://www.ponselimo.com/ + ######### + - regex: '; *(IMO)[ _]([^;/]+) Build' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'IMO' + model_replacement: '$2' + + ######### + # i-mobile + # @ref: http://www.i-mobilephone.com/ + ######### + - regex: '; *i-?mobile[ _]([^/]+) Build/' + regex_flag: 'i' + device_replacement: 'i-mobile $1' + brand_replacement: 'imobile' + model_replacement: '$1' + - regex: '; *(i-(?:style|note)[^/]*) Build/' + regex_flag: 'i' + device_replacement: 'i-mobile $1' + brand_replacement: 'imobile' + model_replacement: '$1' + + ######### + # Impression + # @ref: http://impression.ua/planshetnye-kompyutery + ######### + - regex: '; *(ImPAD) ?(\d+(?:.)*) Build' + device_replacement: '$1 $2' + brand_replacement: 'Impression' + model_replacement: '$1 $2' + + ######### + # Infinix + # @ref: http://www.infinixmobility.com/index.html + ######### + - regex: '; *(Infinix)[ _]([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'Infinix' + model_replacement: '$2' + + ######### + # Informer + # @ref: ?? + ######### + - regex: '; *(Informer)[ \-]([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'Informer' + model_replacement: '$2' + + ######### + # Intenso + # @ref: http://www.intenso.de + # @models: 7":TAB 714,TAB 724;8":TAB 814,TAB 824;10":TAB 1004 + ######### + - regex: '; *(TAB) ?([78][12]4) Build' + device_replacement: 'Intenso $1' + brand_replacement: 'Intenso' + model_replacement: '$1 $2' + + ######### + # Intex + # @ref: http://intexmobile.in/index.aspx + # @note: Zync also offers a "Cloud Z5" device + ######### + # smartphones + - regex: '; *(?:Intex[ _])?(AQUA|Aqua)([ _\.\-])([^;/]+) *(?:Build|;)' + device_replacement: '$1$2$3' + brand_replacement: 'Intex' + model_replacement: '$1 $3' + # matches "INTEX CLOUD X1" + - regex: '; *(?:INTEX|Intex)(?:[_ ]([^\ _;/]+))(?:[_ ]([^\ _;/]+))? 
*(?:Build|;)' + device_replacement: '$1 $2' + brand_replacement: 'Intex' + model_replacement: '$1 $2' + # tablets + - regex: '; *([iI]Buddy)[ _]?(Connect)(?:_|\?_| )?([^;/]*) *(?:Build|;)' + device_replacement: '$1 $2 $3' + brand_replacement: 'Intex' + model_replacement: 'iBuddy $2 $3' + - regex: '; *(I-Buddy)[ _]([^;/]+) *(?:Build|;)' + device_replacement: '$1 $2' + brand_replacement: 'Intex' + model_replacement: 'iBuddy $2' + + ######### + # iOCEAN + # @ref: http://www.iocean.cc/ + ######### + - regex: '; *(iOCEAN) ([^/]+) Build' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'iOCEAN' + model_replacement: '$2' + + ######### + # i.onik + # @ref: http://www.i-onik.de/ + ######### + - regex: '; *(TP\d+(?:\.\d+)?\-\d[^;/]+) Build' + device_replacement: 'ionik $1' + brand_replacement: 'ionik' + model_replacement: '$1' + + ######### + # IRU.ru + # @ref: http://www.iru.ru/catalog/soho/planetable/ + ######### + - regex: '; *(M702pro) Build' + device_replacement: '$1' + brand_replacement: 'Iru' + model_replacement: '$1' + + ######### + # Ivio + # @ref: http://www.ivio.com/mobile.php + # @models: DG80,DG20,DE38,DE88,MD70 + ######### + - regex: '; *(DE88Plus|MD70) Build' + device_replacement: '$1' + brand_replacement: 'Ivio' + model_replacement: '$1' + - regex: '; *IVIO[_\-]([^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Ivio' + model_replacement: '$1' + + ######### + # Jaytech + # @ref: http://www.jay-tech.de/jaytech/servlet/frontend/ + ######### + - regex: '; *(TPC-\d+|JAY-TECH) Build' + device_replacement: '$1' + brand_replacement: 'Jaytech' + model_replacement: '$1' + + ######### + # Jiayu + # @ref: http://www.ejiayu.com/en/Product.html + ######### + - regex: '; *(JY-[^;/]+|G[234]S?) Build' + device_replacement: '$1' + brand_replacement: 'Jiayu' + model_replacement: '$1' + + ######### + # JXD + # @ref: http://www.jxd.hk/ + ######### + - regex: '; *(JXD)[ _\-]([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'JXD' + model_replacement: '$2' + + ######### + # Karbonn + # @ref: http://www.karbonnmobiles.com/products_tablet.php + ######### + - regex: '; *Karbonn[ _]?([^;/]+) *(?:Build|;)' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'Karbonn' + model_replacement: '$1' + - regex: '; *([^;]+) Build/Karbonn' + device_replacement: '$1' + brand_replacement: 'Karbonn' + model_replacement: '$1' + - regex: '; *(A11|A39|A37|A34|ST8|ST10|ST7|Smart Tab3|Smart Tab2|Titanium S\d) +Build' + device_replacement: '$1' + brand_replacement: 'Karbonn' + model_replacement: '$1' + + ######### + # KDDI (Operator Branded Device) + # @ref: http://www.ipentec.com/document/document.aspx?page=android-useragent + ######### + - regex: '; *(IS01|IS03|IS05|IS\d{2}SH) Build' + device_replacement: '$1' + brand_replacement: 'Sharp' + model_replacement: '$1' + - regex: '; *(IS04) Build' + device_replacement: '$1' + brand_replacement: 'Regza' + model_replacement: '$1' + - regex: '; *(IS06|IS\d{2}PT) Build' + device_replacement: '$1' + brand_replacement: 'Pantech' + model_replacement: '$1' + - regex: '; *(IS11S) Build' + device_replacement: '$1' + brand_replacement: 'SonyEricsson' + model_replacement: 'Xperia Acro' + - regex: '; *(IS11CA) Build' + device_replacement: '$1' + brand_replacement: 'Casio' + model_replacement: 'GzOne $1' + - regex: '; *(IS11LG) Build' + device_replacement: '$1' + brand_replacement: 'LG' + model_replacement: 'Optimus X' + - regex: '; *(IS11N) Build' + device_replacement: '$1' + brand_replacement: 'Medias' + model_replacement: '$1' + - 
regex: '; *(IS11PT) Build' + device_replacement: '$1' + brand_replacement: 'Pantech' + model_replacement: 'MIRACH' + - regex: '; *(IS12F) Build' + device_replacement: '$1' + brand_replacement: 'Fujitsu' + model_replacement: 'Arrows ES' + # @ref: https://ja.wikipedia.org/wiki/IS12M + - regex: '; *(IS12M) Build' + device_replacement: '$1' + brand_replacement: 'Motorola' + model_replacement: 'XT909' + - regex: '; *(IS12S) Build' + device_replacement: '$1' + brand_replacement: 'SonyEricsson' + model_replacement: 'Xperia Acro HD' + - regex: '; *(ISW11F) Build' + device_replacement: '$1' + brand_replacement: 'Fujitsu' + model_replacement: 'Arrowz Z' + - regex: '; *(ISW11HT) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: 'EVO' + - regex: '; *(ISW11K) Build' + device_replacement: '$1' + brand_replacement: 'Kyocera' + model_replacement: 'DIGNO' + - regex: '; *(ISW11M) Build' + device_replacement: '$1' + brand_replacement: 'Motorola' + model_replacement: 'Photon' + - regex: '; *(ISW11SC) Build' + device_replacement: '$1' + brand_replacement: 'Samsung' + model_replacement: 'GALAXY S II WiMAX' + - regex: '; *(ISW12HT) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: 'EVO 3D' + - regex: '; *(ISW13HT) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: 'J' + - regex: '; *(ISW?[0-9]{2}[A-Z]{0,2}) Build' + device_replacement: '$1' + brand_replacement: 'KDDI' + model_replacement: '$1' + - regex: '; *(INFOBAR [^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'KDDI' + model_replacement: '$1' + + ######### + # Kingcom + # @ref: http://www.e-kingcom.com + ######### + - regex: '; *(JOYPAD|Joypad)[ _]([^;/]+) Build/' + device_replacement: '$1 $2' + brand_replacement: 'Kingcom' + model_replacement: '$1 $2' + + ######### + # Kobo + # @ref: https://en.wikipedia.org/wiki/Kobo_Inc. + # @ref: http://www.kobo.com/devices#tablets + ######### + - regex: '; *(Vox|VOX|Arc|K080) Build/' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'Kobo' + model_replacement: '$1' + - regex: '\b(Kobo Touch)\b' + device_replacement: '$1' + brand_replacement: 'Kobo' + model_replacement: '$1' + + ######### + # K-Touch + # @ref: ?? + ######### + - regex: '; *(K-Touch)[ _]([^;/]+) Build' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'Ktouch' + model_replacement: '$2' + + ######### + # KT Tech + # @ref: http://www.kttech.co.kr + ######### + - regex: '; *((?:EV|KM)-S\d+[A-Z]?) Build' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'KTtech' + model_replacement: '$1' + + ######### + # Kyocera + # @ref: http://www.android.com/devices/?country=all&m=kyocera + ######### + - regex: '; *(Zio|Hydro|Torque|Event|EVENT|Echo|Milano|Rise|URBANO PROGRESSO|WX04K|WX06K|WX10K|KYL21|101K|C5[12]\d{2}) Build/' + device_replacement: '$1' + brand_replacement: 'Kyocera' + model_replacement: '$1' + + ######### + # Lava + # @ref: http://www.lavamobiles.com/ + ######### + - regex: '; *(?:LAVA[ _])?IRIS[ _\-]?([^/;\)]+) *(?:;|\)|Build)' + regex_flag: 'i' + device_replacement: 'Iris $1' + brand_replacement: 'Lava' + model_replacement: 'Iris $1' + - regex: '; *LAVA[ _]([^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Lava' + model_replacement: '$1' + + ######### + # Lemon + # @ref: http://www.lemonmobiles.com/products.php?type=1 + ######### + - regex: '; *(?:(Aspire A1)|(?:LEMON|Lemon)[ _]([^;/]+))_? 
Build' + device_replacement: 'Lemon $1$2' + brand_replacement: 'Lemon' + model_replacement: '$1$2' + + ######### + # Lenco + # @ref: http://www.lenco.com/c/tablets/ + ######### + - regex: '; *(TAB-1012) Build/' + device_replacement: 'Lenco $1' + brand_replacement: 'Lenco' + model_replacement: '$1' + - regex: '; Lenco ([^;/]+) Build/' + device_replacement: 'Lenco $1' + brand_replacement: 'Lenco' + model_replacement: '$1' + + ######### + # Lenovo + # @ref: http://support.lenovo.com/en_GB/downloads/default.page?# + ######### + - regex: '; *(A1_07|A2107A-H|S2005A-H|S1-37AH0) Build' + device_replacement: '$1' + brand_replacement: 'Lenovo' + model_replacement: '$1' + - regex: '; *(Idea[Tp]ab)[ _]([^;/]+);? Build' + device_replacement: 'Lenovo $1 $2' + brand_replacement: 'Lenovo' + model_replacement: '$1 $2' + - regex: '; *(Idea(?:Tab|pad)) ?([^;/]+) Build' + device_replacement: 'Lenovo $1 $2' + brand_replacement: 'Lenovo' + model_replacement: '$1 $2' + - regex: '; *(ThinkPad) ?(Tablet) Build/' + device_replacement: 'Lenovo $1 $2' + brand_replacement: 'Lenovo' + model_replacement: '$1 $2' + - regex: '; *(?:LNV-)?(?:=?[Ll]enovo[ _\-]?|LENOVO[ _])+(.+?)(?:Build|[;/\)])' + device_replacement: 'Lenovo $1' + brand_replacement: 'Lenovo' + model_replacement: '$1' + - regex: '[;,] (?:Vodafone )?(SmartTab) ?(II) ?(\d+) Build/' + device_replacement: 'Lenovo $1 $2 $3' + brand_replacement: 'Lenovo' + model_replacement: '$1 $2 $3' + - regex: '; *(?:Ideapad )?K1 Build/' + device_replacement: 'Lenovo Ideapad K1' + brand_replacement: 'Lenovo' + model_replacement: 'Ideapad K1' + - regex: '; *(3GC101|3GW10[01]|A390) Build/' + device_replacement: '$1' + brand_replacement: 'Lenovo' + model_replacement: '$1' + - regex: '\b(?:Lenovo|LENOVO)+[ _\-]?([^,;:/ ]+)' + device_replacement: 'Lenovo $1' + brand_replacement: 'Lenovo' + model_replacement: '$1' + + ######### + # Lexibook + # @ref: http://www.lexibook.com/fr + ######### + - regex: '; *(MFC\d+)[A-Z]{2}([^;,/]*),? Build' + device_replacement: '$1$2' + brand_replacement: 'Lexibook' + model_replacement: '$1$2' + + ######### + # LG + # @ref: http://www.lg.com/uk/mobile + ######### + - regex: '; *(E[34][0-9]{2}|LS[6-8][0-9]{2}|VS[6-9][0-9]+[^;/]+|Nexus 4|Nexus 5X?|GT540f?|Optimus (?:2X|G|4X HD)|OptimusX4HD) *(?:Build|;)' + device_replacement: '$1' + brand_replacement: 'LG' + model_replacement: '$1' + - regex: '[;:] *(L-\d+[A-Z]|LGL\d+[A-Z]?)(?:/V\d+)? *(?:Build|[;\)])' + device_replacement: '$1' + brand_replacement: 'LG' + model_replacement: '$1' + - regex: '; *(LG-)([A-Z]{1,2}\d{2,}[^,;/\)\(]*?)(?:Build| V\d+|[,;/\)\(]|$)' + device_replacement: '$1$2' + brand_replacement: 'LG' + model_replacement: '$2' + - regex: '; *(LG[ \-]|LG)([^;/]+)[;/]? 
Build' + device_replacement: '$1$2' + brand_replacement: 'LG' + model_replacement: '$2' + - regex: '^(LG)-([^;/]+)/ Mozilla/.*; Android' + device_replacement: '$1 $2' + brand_replacement: 'LG' + model_replacement: '$2' + + ######### + # Malata + # @ref: http://www.malata.com/en/products.aspx?classid=680 + ######### + - regex: '; *((?:SMB|smb)[^;/]+) Build/' + device_replacement: '$1' + brand_replacement: 'Malata' + model_replacement: '$1' + - regex: '; *(?:Malata|MALATA) ([^;/]+) Build/' + device_replacement: '$1' + brand_replacement: 'Malata' + model_replacement: '$1' + + ######### + # Manta + # @ref: http://www.manta.com.pl/en + ######### + - regex: '; *(MS[45][0-9]{3}|MID0[568][NS]?|MID[1-9]|MID[78]0[1-9]|MID970[1-9]|MID100[1-9]) Build/' + device_replacement: '$1' + brand_replacement: 'Manta' + model_replacement: '$1' + + ######### + # Match + # @ref: http://www.match.net.cn/products.asp + ######### + - regex: '; *(M1052|M806|M9000|M9100|M9701|MID100|MID120|MID125|MID130|MID135|MID140|MID701|MID710|MID713|MID727|MID728|MID731|MID732|MID733|MID735|MID736|MID737|MID760|MID800|MID810|MID820|MID830|MID833|MID835|MID860|MID900|MID930|MID933|MID960|MID980) Build/' + device_replacement: '$1' + brand_replacement: 'Match' + model_replacement: '$1' + + ######### + # Maxx + # @ref: http://www.maxxmobile.in/ + # @models: Maxx MSD7-Play, Maxx MX245+ Trance, Maxx AX8 Race, Maxx MSD7 3G- AX50, Maxx Genx Droid 7 - AX40, Maxx AX5 Duo, + # Maxx AX3 Duo, Maxx AX3, Maxx AX8 Note II (Note 2), Maxx AX8 Note I, Maxx AX8, Maxx AX5 Plus, Maxx MSD7 Smarty, + # Maxx AX9Z Race, + # Maxx MT150, Maxx MQ601, Maxx M2020, Maxx Sleek MX463neo, Maxx MX525, Maxx MX192-Tune, Maxx Genx Droid 7 AX353, + # @note: Need more User-Agents!!! + ######### + - regex: '; *(GenxDroid7|MSD7.*|AX\d.*|Tab 701|Tab 722) Build/' + device_replacement: 'Maxx $1' + brand_replacement: 'Maxx' + model_replacement: '$1' + + ######### + # Mediacom + # @ref: http://www.mediacomeurope.it/ + ######### + - regex: '; *(M-PP[^;/]+|PhonePad ?\d{2,}[^;/]+) Build' + device_replacement: 'Mediacom $1' + brand_replacement: 'Mediacom' + model_replacement: '$1' + - regex: '; *(M-MP[^;/]+|SmartPad ?\d{2,}[^;/]+) Build' + device_replacement: 'Mediacom $1' + brand_replacement: 'Mediacom' + model_replacement: '$1' + + ######### + # Medion + # @ref: http://www.medion.com/en/ + ######### + - regex: '; *(?:MD_)?LIFETAB[ _]([^;/]+) Build' + regex_flag: 'i' + device_replacement: 'Medion Lifetab $1' + brand_replacement: 'Medion' + model_replacement: 'Lifetab $1' + - regex: '; *MEDION ([^;/]+) Build' + device_replacement: 'Medion $1' + brand_replacement: 'Medion' + model_replacement: '$1' + + ######### + # Meizu + # @ref: http://www.meizu.com + ######### + - regex: '; *(M030|M031|M035|M040|M065|m9) Build' + device_replacement: 'Meizu $1' + brand_replacement: 'Meizu' + model_replacement: '$1' + - regex: '; *(?:meizu_|MEIZU )(.+?) *(?:Build|[;\)])' + device_replacement: 'Meizu $1' + brand_replacement: 'Meizu' + model_replacement: '$1' + + ######### + # Micromax + # @ref: http://www.micromaxinfo.com + ######### + - regex: '; *(?:Micromax[ _](A111|A240)|(A111|A240)) Build' + regex_flag: 'i' + device_replacement: 'Micromax $1$2' + brand_replacement: 'Micromax' + model_replacement: '$1$2' + - regex: '; *Micromax[ _](A\d{2,3}[^;/]*) Build' + regex_flag: 'i' + device_replacement: 'Micromax $1' + brand_replacement: 'Micromax' + model_replacement: '$1' + # be careful here with Acer e.g. 
A500 + - regex: '; *(A\d{2}|A[12]\d{2}|A90S|A110Q) Build' + regex_flag: 'i' + device_replacement: 'Micromax $1' + brand_replacement: 'Micromax' + model_replacement: '$1' + - regex: '; *Micromax[ _](P\d{3}[^;/]*) Build' + regex_flag: 'i' + device_replacement: 'Micromax $1' + brand_replacement: 'Micromax' + model_replacement: '$1' + - regex: '; *(P\d{3}|P\d{3}\(Funbook\)) Build' + regex_flag: 'i' + device_replacement: 'Micromax $1' + brand_replacement: 'Micromax' + model_replacement: '$1' + + ######### + # Mito + # @ref: http://new.mitomobile.com/ + ######### + - regex: '; *(MITO)[ _\-]?([^;/]+) Build' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'Mito' + model_replacement: '$2' + + ######### + # Mobistel + # @ref: http://www.mobistel.com/ + ######### + - regex: '; *(Cynus)[ _](F5|T\d|.+?) *(?:Build|[;/\)])' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'Mobistel' + model_replacement: '$1 $2' + + ######### + # Modecom + # @ref: http://www.modecom.eu/tablets/portal/ + ######### + - regex: '; *(MODECOM )?(FreeTab) ?([^;/]+) Build' + regex_flag: 'i' + device_replacement: '$1$2 $3' + brand_replacement: 'Modecom' + model_replacement: '$2 $3' + - regex: '; *(MODECOM )([^;/]+) Build' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'Modecom' + model_replacement: '$2' + + ######### + # Motorola + # @ref: http://www.motorola.com/us/shop-all-mobile-phones/ + ######### + - regex: '; *(MZ\d{3}\+?|MZ\d{3} 4G|Xoom|XOOM[^;/]*) Build' + device_replacement: 'Motorola $1' + brand_replacement: 'Motorola' + model_replacement: '$1' + - regex: '; *(Milestone )(XT[^;/]*) Build' + device_replacement: 'Motorola $1$2' + brand_replacement: 'Motorola' + model_replacement: '$2' + - regex: '; *(Motoroi ?x|Droid X|DROIDX) Build' + regex_flag: 'i' + device_replacement: 'Motorola $1' + brand_replacement: 'Motorola' + model_replacement: 'DROID X' + - regex: '; *(Droid[^;/]*|DROID[^;/]*|Milestone[^;/]*|Photon|Triumph|Devour|Titanium) Build' + device_replacement: 'Motorola $1' + brand_replacement: 'Motorola' + model_replacement: '$1' + - regex: '; *(A555|A85[34][^;/]*|A95[356]|ME[58]\d{2}\+?|ME600|ME632|ME722|MB\d{3}\+?|MT680|MT710|MT870|MT887|MT917|WX435|WX453|WX44[25]|XT\d{3,4}[A-Z\+]*|CL[iI]Q|CL[iI]Q XT) Build' + device_replacement: '$1' + brand_replacement: 'Motorola' + model_replacement: '$1' + - regex: '; *(Motorola MOT-|Motorola[ _\-]|MOT\-?)([^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Motorola' + model_replacement: '$2' + - regex: '; *(Moto[_ ]?|MOT\-)([^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Motorola' + model_replacement: '$2' + + ######### + # MpMan + # @ref: http://www.mpmaneurope.com + ######### + - regex: '; *((?:MP[DQ]C|MPG\d{1,4}|MP\d{3,4}|MID(?:(?:10[234]|114|43|7[247]|8[24]|7)C|8[01]1))[^;/]*) Build' + device_replacement: '$1' + brand_replacement: 'Mpman' + model_replacement: '$1' + + ######### + # MSI + # @ref: http://www.msi.com/product/windpad/ + ######### + - regex: '; *(?:MSI[ _])?(Primo\d+|Enjoy[ _\-][^;/]+) Build' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'Msi' + model_replacement: '$1' + + ######### + # Multilaser + # http://www.multilaser.com.br/listagem_produtos.php?cat=5 + ######### + - regex: '; *Multilaser[ _]([^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Multilaser' + model_replacement: '$1' + + ######### + # MyPhone + # @ref: http://myphone.com.ph/ + ######### + - regex: '; *(My)[_]?(Pad)[ _]([^;/]+) Build' + device_replacement: '$1$2 $3' + 
brand_replacement: 'MyPhone' + model_replacement: '$1$2 $3' + - regex: '; *(My)\|?(Phone)[ _]([^;/]+) Build' + device_replacement: '$1$2 $3' + brand_replacement: 'MyPhone' + model_replacement: '$3' + - regex: '; *(A\d+)[ _](Duo)? Build' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'MyPhone' + model_replacement: '$1 $2' + + ######### + # Mytab + # @ref: http://www.mytab.eu/en/category/mytab-products/ + ######### + - regex: '; *(myTab[^;/]*) Build' + device_replacement: '$1' + brand_replacement: 'Mytab' + model_replacement: '$1' + + ######### + # Nabi + # @ref: https://www.nabitablet.com + ######### + - regex: '; *(NABI2?-)([^;/]+) Build/' + device_replacement: '$1$2' + brand_replacement: 'Nabi' + model_replacement: '$2' + + ######### + # Nec Medias + # @ref: http://www.n-keitai.com/ + ######### + - regex: '; *(N-\d+[CDE]) Build/' + device_replacement: '$1' + brand_replacement: 'Nec' + model_replacement: '$1' + - regex: '; ?(NEC-)(.*) Build/' + device_replacement: '$1$2' + brand_replacement: 'Nec' + model_replacement: '$2' + - regex: '; *(LT-NA7) Build/' + device_replacement: '$1' + brand_replacement: 'Nec' + model_replacement: 'Lifetouch Note' + + ######### + # Nextbook + # @ref: http://nextbookusa.com + ######### + - regex: '; *(NXM\d+[A-z0-9_]*|Next\d[A-z0-9_ \-]*|NEXT\d[A-z0-9_ \-]*|Nextbook [A-z0-9_ ]*|DATAM803HC|M805)(?: Build|[\);])' + device_replacement: '$1' + brand_replacement: 'Nextbook' + model_replacement: '$1' + + ######### + # Nokia + # @ref: http://www.nokia.com + ######### + - regex: '; *(Nokia)([ _\-]*)([^;/]*) Build' + regex_flag: 'i' + device_replacement: '$1$2$3' + brand_replacement: 'Nokia' + model_replacement: '$3' + + ######### + # Nook + # @ref: + # TODO nook browser/1.0 + ######### + - regex: '; *(Nook ?|Barnes & Noble Nook |BN )([^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Nook' + model_replacement: '$2' + - regex: '; *(NOOK )?(BNRV200|BNRV200A|BNTV250|BNTV250A|BNTV400|BNTV600|LogicPD Zoom2) Build' + device_replacement: '$1$2' + brand_replacement: 'Nook' + model_replacement: '$2' + - regex: '; Build/(Nook)' + device_replacement: '$1' + brand_replacement: 'Nook' + model_replacement: 'Tablet' + + ######### + # Olivetti + # @ref: http://www.olivetti.de/EN/Page/t02/view_html?idp=348 + ######### + - regex: '; *(OP110|OliPad[^;/]+) Build' + device_replacement: 'Olivetti $1' + brand_replacement: 'Olivetti' + model_replacement: '$1' + + ######### + # Omega + # @ref: http://omega-technology.eu/en/produkty/346/tablets + # @note: MID tablets might get matched by CobyKyros first + # @models: (T107|MID(?:700[2-5]|7031|7108|7132|750[02]|8001|8500|9001|971[12]) + ######### + - regex: '; *OMEGA[ _\-](MID[^;/]+) Build' + device_replacement: 'Omega $1' + brand_replacement: 'Omega' + model_replacement: '$1' + - regex: '^(MID7500|MID\d+) Mozilla/5\.0 \(iPad;' + device_replacement: 'Omega $1' + brand_replacement: 'Omega' + model_replacement: '$1' + + ######### + # OpenPeak + # @ref: https://support.google.com/googleplay/answer/1727131?hl=en + ######### + - regex: '; *((?:CIUS|cius)[^;/]*) Build' + device_replacement: 'Openpeak $1' + brand_replacement: 'Openpeak' + model_replacement: '$1' + + ######### + # Oppo + # @ref: http://en.oppo.com/products/ + ######### + - regex: '; *(Find ?(?:5|7a)|R8[012]\d{1,2}|T703\d{0,1}|U70\d{1,2}T?|X90\d{1,2}) Build' + device_replacement: 'Oppo $1' + brand_replacement: 'Oppo' + model_replacement: '$1' + - regex: '; *OPPO ?([^;/]+) Build/' + device_replacement: 'Oppo $1' + brand_replacement: 'Oppo' + 
model_replacement: '$1' + + ######### + # Odys + # @ref: http://odys.de + ######### + - regex: '; *(?:Odys\-|ODYS\-|ODYS )([^;/]+) Build' + device_replacement: 'Odys $1' + brand_replacement: 'Odys' + model_replacement: '$1' + - regex: '; *(SELECT) ?(7) Build' + device_replacement: 'Odys $1 $2' + brand_replacement: 'Odys' + model_replacement: '$1 $2' + - regex: '; *(PEDI)_(PLUS)_(W) Build' + device_replacement: 'Odys $1 $2 $3' + brand_replacement: 'Odys' + model_replacement: '$1 $2 $3' + # Weltbild - Tablet PC 4 = Cat Phoenix = Odys Tablet PC 4? + - regex: '; *(AEON|BRAVIO|FUSION|FUSION2IN1|Genio|EOS10|IEOS[^;/]*|IRON|Loox|LOOX|LOOX Plus|Motion|NOON|NOON_PRO|NEXT|OPOS|PEDI[^;/]*|PRIME[^;/]*|STUDYTAB|TABLO|Tablet-PC-4|UNO_X8|XELIO[^;/]*|Xelio ?\d+ ?[Pp]ro|XENO10|XPRESS PRO) Build' + device_replacement: 'Odys $1' + brand_replacement: 'Odys' + model_replacement: '$1' + + ######### + # Orion + # @ref: http://www.orion.ua/en/products/computer-products/tablet-pcs.html + ######### + - regex: '; *(TP-\d+) Build/' + device_replacement: 'Orion $1' + brand_replacement: 'Orion' + model_replacement: '$1' + + ######### + # PackardBell + # @ref: http://www.packardbell.com/pb/en/AE/content/productgroup/tablets + ######### + - regex: '; *(G100W?) Build/' + device_replacement: 'PackardBell $1' + brand_replacement: 'PackardBell' + model_replacement: '$1' + + ######### + # Panasonic + # @ref: http://panasonic.jp/mobile/ + # @models: T11, T21, T31, P11, P51, Eluga Power, Eluga DL1 + # @models: (tab) Toughpad FZ-A1, Toughpad JT-B1 + ######### + - regex: '; *(Panasonic)[_ ]([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + # Toughpad + - regex: '; *(FZ-A1B|JT-B1) Build' + device_replacement: 'Panasonic $1' + brand_replacement: 'Panasonic' + model_replacement: '$1' + # Eluga Power + - regex: '; *(dL1|DL1) Build' + device_replacement: 'Panasonic $1' + brand_replacement: 'Panasonic' + model_replacement: '$1' + + ######### + # Pantech + # @href: http://www.pantech.co.kr/en/prod/prodList.do?gbrand=PANTECH + # @href: http://www.pantech.co.kr/en/prod/prodList.do?gbrand=VEGA + # @models: ADR8995, ADR910L, ADR930VW, C790, CDM8992, CDM8999, IS06, IS11PT, P2000, P2020, P2030, P4100, P5000, P6010, P6020, P6030, P7000, P7040, P8000, P8010, P9020, P9050, P9060, P9070, P9090, PT001, PT002, PT003, TXT8040, TXT8045, VEGA PTL21 + ######### + - regex: '; *(SKY[ _])?(IM\-[AT]\d{3}[^;/]+).* Build/' + device_replacement: 'Pantech $1$2' + brand_replacement: 'Pantech' + model_replacement: '$1$2' + - regex: '; *((?:ADR8995|ADR910L|ADR930L|ADR930VW|PTL21|P8000)(?: 4G)?) 
Build/' + device_replacement: '$1' + brand_replacement: 'Pantech' + model_replacement: '$1' + - regex: '; *Pantech([^;/]+).* Build/' + device_replacement: 'Pantech $1' + brand_replacement: 'Pantech' + model_replacement: '$1' + + ######### + # Papyre + # @ref: http://grammata.es/ + ######### + - regex: '; *(papyre)[ _\-]([^;/]+) Build/' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'Papyre' + model_replacement: '$2' + + ######### + # Pearl + # @ref: http://www.pearl.de/c-1540.shtml + ######### + - regex: '; *(?:Touchlet )?(X10\.[^;/]+) Build/' + device_replacement: 'Pearl $1' + brand_replacement: 'Pearl' + model_replacement: '$1' + + ######### + # Phicomm + # @ref: http://www.phicomm.com.cn/ + ######### + - regex: '; PHICOMM (i800) Build/' + device_replacement: 'Phicomm $1' + brand_replacement: 'Phicomm' + model_replacement: '$1' + - regex: '; PHICOMM ([^;/]+) Build/' + device_replacement: 'Phicomm $1' + brand_replacement: 'Phicomm' + model_replacement: '$1' + - regex: '; *(FWS\d{3}[^;/]+) Build/' + device_replacement: 'Phicomm $1' + brand_replacement: 'Phicomm' + model_replacement: '$1' + + ######### + # Philips + # @ref: http://www.support.philips.com/support/catalog/products.jsp?_dyncharset=UTF-8&country=&categoryid=MOBILE_PHONES_SMART_SU_CN_CARE&userLanguage=en&navCount=2&groupId=PC_PRODUCTS_AND_PHONES_GR_CN_CARE&catalogType=&navAction=push&userCountry=cn&title=Smartphones&cateId=MOBILE_PHONES_CA_CN_CARE + # @TODO: Philips Tablets User-Agents missing! + # @ref: http://www.support.philips.com/support/catalog/products.jsp?_dyncharset=UTF-8&country=&categoryid=ENTERTAINMENT_TABLETS_SU_CN_CARE&userLanguage=en&navCount=0&groupId=&catalogType=&navAction=push&userCountry=cn&title=Entertainment+Tablets&cateId=TABLETS_CA_CN_CARE + ######### + # @note: this is a best guess according to available Philips models. Need more User-Agents + - regex: '; *(D633|D822|D833|T539|T939|V726|W335|W336|W337|W3568|W536|W5510|W626|W632|W6350|W6360|W6500|W732|W736|W737|W7376|W820|W832|W8355|W8500|W8510|W930) Build' + device_replacement: '$1' + brand_replacement: 'Philips' + model_replacement: '$1' + - regex: '; *(?:Philips|PHILIPS)[ _]([^;/]+) Build' + device_replacement: 'Philips $1' + brand_replacement: 'Philips' + model_replacement: '$1' + + ######### + # Pipo + # @ref: http://www.pipo.cn/En/ + ######### + - regex: 'Android 4\..*; *(M[12356789]|U[12368]|S[123])\ ?(pro)? Build' + device_replacement: 'Pipo $1$2' + brand_replacement: 'Pipo' + model_replacement: '$1$2' + + ######### + # Ployer + # @ref: http://en.ployer.cn/ + ######### + - regex: '; *(MOMO[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Ployer' + model_replacement: '$1' + + ######### + # Polaroid/ Acho + # @ref: http://polaroidstore.com/store/start.asp?category_id=382&category_id2=0&order=title&filter1=&filter2=&filter3=&view=all + ######### + - regex: '; *(?:Polaroid[ _])?((?:MIDC\d{3,}|PMID\d{2,}|PTAB\d{3,})[^;/]*)(\/[^;/]*)? Build/' + device_replacement: '$1' + brand_replacement: 'Polaroid' + model_replacement: '$1' + - regex: '; *(?:Polaroid )(Tablet) Build/' + device_replacement: '$1' + brand_replacement: 'Polaroid' + model_replacement: '$1' + + ######### + # Pomp + # @ref: http://pompmobileshop.com/ + ######### + #~ TODO + - regex: '; *(POMP)[ _\-](.+?) 
*(?:Build|[;/\)])' + device_replacement: '$1 $2' + brand_replacement: 'Pomp' + model_replacement: '$2' + + ######### + # Positivo + # @ref: http://www.positivoinformatica.com.br/www/pessoal/tablet-ypy/ + ######### + - regex: '; *(TB07STA|TB10STA|TB07FTA|TB10FTA) Build/' + device_replacement: '$1' + brand_replacement: 'Positivo' + model_replacement: '$1' + - regex: '; *(?:Positivo )?((?:YPY|Ypy)[^;/]+) Build/' + device_replacement: '$1' + brand_replacement: 'Positivo' + model_replacement: '$1' + + ######### + # POV + # @ref: http://www.pointofview-online.com/default2.php + # @TODO: Smartphone Models MOB-3515, MOB-5045-B missing + ######### + - regex: '; *(MOB-[^;/]+) Build/' + device_replacement: '$1' + brand_replacement: 'POV' + model_replacement: '$1' + - regex: '; *POV[ _\-]([^;/]+) Build/' + device_replacement: 'POV $1' + brand_replacement: 'POV' + model_replacement: '$1' + - regex: '; *((?:TAB-PLAYTAB|TAB-PROTAB|PROTAB|PlayTabPro|Mobii[ _\-]|TAB-P)[^;/]*) Build/' + device_replacement: 'POV $1' + brand_replacement: 'POV' + model_replacement: '$1' + + ######### + # Prestigio + # @ref: http://www.prestigio.com/catalogue/MultiPhones + # @ref: http://www.prestigio.com/catalogue/MultiPads + ######### + - regex: '; *(?:Prestigio )?((?:PAP|PMP)\d[^;/]+) Build/' + device_replacement: 'Prestigio $1' + brand_replacement: 'Prestigio' + model_replacement: '$1' + + ######### + # Proscan + # @ref: http://www.proscanvideo.com/products-search.asp?itemClass=TABLET&itemnmbr= + ######### + - regex: '; *(PLT[0-9]{4}.*) Build/' + device_replacement: '$1' + brand_replacement: 'Proscan' + model_replacement: '$1' + + ######### + # QMobile + # @ref: http://www.qmobile.com.pk/ + ######### + - regex: '; *(A2|A5|A8|A900)_?(Classic)? Build' + device_replacement: '$1 $2' + brand_replacement: 'Qmobile' + model_replacement: '$1 $2' + - regex: '; *(Q[Mm]obile)_([^_]+)_([^_]+) Build' + device_replacement: 'Qmobile $2 $3' + brand_replacement: 'Qmobile' + model_replacement: '$2 $3' + - regex: '; *(Q\-?[Mm]obile)[_ ](A[^;/]+) Build' + device_replacement: 'Qmobile $2' + brand_replacement: 'Qmobile' + model_replacement: '$2' + + ######### + # Qmobilevn + # @ref: http://qmobile.vn/san-pham.html + ######### + - regex: '; *(Q\-Smart)[ _]([^;/]+) Build/' + device_replacement: '$1 $2' + brand_replacement: 'Qmobilevn' + model_replacement: '$2' + - regex: '; *(Q\-?[Mm]obile)[ _\-](S[^;/]+) Build/' + device_replacement: '$1 $2' + brand_replacement: 'Qmobilevn' + model_replacement: '$2' + + ######### + # Quanta + # @ref: ? + ######### + - regex: '; *(TA1013) Build' + device_replacement: '$1' + brand_replacement: 'Quanta' + model_replacement: '$1' + + ######### + # Rockchip + # @ref: http://www.rock-chips.com/a/cn/product/index.html + # @note: manufacturer sells chipsets - I assume that these UAs are dev-boards + ######### + - regex: '; *(RK\d+),? Build/' + device_replacement: '$1' + brand_replacement: 'Rockchip' + model_replacement: '$1' + - regex: ' Build/(RK\d+)' + device_replacement: '$1' + brand_replacement: 'Rockchip' + model_replacement: '$1' + + ######### + # Samsung Android Devices + # @ref: http://www.samsung.com/us/mobile/cell-phones/all-products + ######### + - regex: '; *(SAMSUNG |Samsung )?((?:Galaxy (?:Note II|S\d)|GT-I9082|GT-I9205|GT-N7\d{3}|SM-N9005)[^;/]*)\/?[^;/]* Build/' + device_replacement: 'Samsung $1$2' + brand_replacement: 'Samsung' + model_replacement: '$2' + - regex: '; *(Google )?(Nexus [Ss](?: 4G)?) 
Build/' + device_replacement: 'Samsung $1$2' + brand_replacement: 'Samsung' + model_replacement: '$2' + - regex: '; *(SAMSUNG |Samsung )([^\/]*)\/[^ ]* Build/' + device_replacement: 'Samsung $2' + brand_replacement: 'Samsung' + model_replacement: '$2' + - regex: '; *(Galaxy(?: Ace| Nexus| S ?II+|Nexus S| with MCR 1.2| Mini Plus 4G)?) Build/' + device_replacement: 'Samsung $1' + brand_replacement: 'Samsung' + model_replacement: '$1' + - regex: '; *(SAMSUNG[ _\-] *)+([^;/]+) Build' + device_replacement: 'Samsung $2' + brand_replacement: 'Samsung' + model_replacement: '$2' + - regex: '; *(SAMSUNG-)?(GT\-[BINPS]\d{4}[^\/]*)(\/[^ ]*) Build' + device_replacement: 'Samsung $1$2$3' + brand_replacement: 'Samsung' + model_replacement: '$2' + - regex: '(?:; *|^)((?:GT\-[BIiNPS]\d{4}|I9\d{2}0[A-Za-z\+]?\b)[^;/\)]*?)(?:Build|Linux|MIUI|[;/\)])' + device_replacement: 'Samsung $1' + brand_replacement: 'Samsung' + model_replacement: '$1' + - regex: '; (SAMSUNG-)([A-Za-z0-9\-]+).* Build/' + device_replacement: 'Samsung $1$2' + brand_replacement: 'Samsung' + model_replacement: '$2' + - regex: '; *((?:SCH|SGH|SHV|SHW|SPH|SC|SM)\-[A-Za-z0-9 ]+)(/?[^ ]*)? Build' + device_replacement: 'Samsung $1' + brand_replacement: 'Samsung' + model_replacement: '$1' + - regex: ' ((?:SCH)\-[A-Za-z0-9 ]+)(/?[^ ]*)? Build' + device_replacement: 'Samsung $1' + brand_replacement: 'Samsung' + model_replacement: '$1' + - regex: '; *(Behold ?(?:2|II)|YP\-G[^;/]+|EK-GC100|SCL21|I9300) Build' + device_replacement: 'Samsung $1' + brand_replacement: 'Samsung' + model_replacement: '$1' + + ######### + # Sharp + # @ref: http://www.sharp-phone.com/en/index.html + # @ref: http://www.android.com/devices/?country=all&m=sharp + ######### + - regex: '; *(SH\-?\d\d[^;/]+|SBM\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Sharp' + model_replacement: '$1' + - regex: '; *(SHARP[ -])([^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Sharp' + model_replacement: '$2' + + ######### + # Simvalley + # @ref: http://www.simvalley-mobile.de/ + ######### + - regex: '; *(SPX[_\-]\d[^;/]*) Build/' + device_replacement: '$1' + brand_replacement: 'Simvalley' + model_replacement: '$1' + - regex: '; *(SX7\-PEARL\.GmbH) Build/' + device_replacement: '$1' + brand_replacement: 'Simvalley' + model_replacement: '$1' + - regex: '; *(SP[T]?\-\d{2}[^;/]*) Build/' + device_replacement: '$1' + brand_replacement: 'Simvalley' + model_replacement: '$1' + + ######### + # SK Telesys + # @ref: http://www.sk-w.com/phone/phone_list.jsp + # @ref: http://www.android.com/devices/?country=all&m=sk-telesys + ######### + - regex: '; *(SK\-.*) Build/' + device_replacement: '$1' + brand_replacement: 'SKtelesys' + model_replacement: '$1' + + ######### + # Skytex + # @ref: http://skytex.com/android + ######### + - regex: '; *(?:SKYTEX|SX)-([^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Skytex' + model_replacement: '$1' + - regex: '; *(IMAGINE [^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Skytex' + model_replacement: '$1' + + ######### + # SmartQ + # @ref: http://en.smartdevices.com.cn/Products/ + # @models: Z8, X7, U7H, U7, T30, T20, Ten3, V5-II, T7-3G, SmartQ5, K7, S7, Q8, T19, Ten2, Ten, R10, T7, R7, V5, V7, SmartQ7 + ######### + - regex: '; *(SmartQ) ?([^;/]+) Build/' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + + ######### + # Smartbitt + # @ref: http://www.smartbitt.com/ + # @missing: SBT Useragents + ######### + - regex: '; *(WF7C|WF10C|SBT[^;/]+) Build' + device_replacement: 
'$1' + brand_replacement: 'Smartbitt' + model_replacement: '$1' + + ######### + # Softbank (Operator Branded Devices) + # @ref: http://www.ipentec.com/document/document.aspx?page=android-useragent + ######### + - regex: '; *(SBM(?:003SH|005SH|006SH|007SH|102SH)) Build' + device_replacement: '$1' + brand_replacement: 'Sharp' + model_replacement: '$1' + - regex: '; *(003P|101P|101P11C|102P) Build' + device_replacement: '$1' + brand_replacement: 'Panasonic' + model_replacement: '$1' + - regex: '; *(00\dZ) Build/' + device_replacement: '$1' + brand_replacement: 'ZTE' + model_replacement: '$1' + - regex: '; HTC(X06HT) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: '$1' + - regex: '; *(001HT|X06HT) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: '$1' + - regex: '; *(201M) Build' + device_replacement: '$1' + brand_replacement: 'Motorola' + model_replacement: 'XT902' + + ######### + # Trekstor + # @ref: http://www.trekstor.co.uk/surftabs-en.html + # @note: Must come before SonyEricsson + ######### + - regex: '; *(ST\d{4}.*)Build/ST' + device_replacement: 'Trekstor $1' + brand_replacement: 'Trekstor' + model_replacement: '$1' + - regex: '; *(ST\d{4}.*) Build/' + device_replacement: 'Trekstor $1' + brand_replacement: 'Trekstor' + model_replacement: '$1' + + ######### + # SonyEricsson + # @note: Must come before nokia since they also use symbian + # @ref: http://www.android.com/devices/?country=all&m=sony-ericssons + # @TODO: type! + ######### + # android matchers + - regex: '; *(Sony ?Ericsson ?)([^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'SonyEricsson' + model_replacement: '$2' + - regex: '; *((?:SK|ST|E|X|LT|MK|MT|WT)\d{2}[a-z0-9]*(?:-o)?|R800i|U20i) Build' + device_replacement: '$1' + brand_replacement: 'SonyEricsson' + model_replacement: '$1' + # TODO X\d+ is wrong + - regex: '; *(Xperia (?:A8|Arc|Acro|Active|Live with Walkman|Mini|Neo|Play|Pro|Ray|X\d+)[^;/]*) Build' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'SonyEricsson' + model_replacement: '$1' + + ######### + # Sony + # @ref: http://www.sonymobile.co.jp/index.html + # @ref: http://www.sonymobile.com/global-en/products/phones/ + # @ref: http://www.sony.jp/tablet/ + ######### + - regex: '; Sony (Tablet[^;/]+) Build' + device_replacement: 'Sony $1' + brand_replacement: 'Sony' + model_replacement: '$1' + - regex: '; Sony ([^;/]+) Build' + device_replacement: 'Sony $1' + brand_replacement: 'Sony' + model_replacement: '$1' + - regex: '; *(Sony)([A-Za-z0-9\-]+) Build' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + - regex: '; *(Xperia [^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Sony' + model_replacement: '$1' + - regex: '; *(C(?:1[0-9]|2[0-9]|53|55|6[0-9])[0-9]{2}|D[25]\d{3}|D6[56]\d{2}) Build' + device_replacement: '$1' + brand_replacement: 'Sony' + model_replacement: '$1' + - regex: '; *(SGP\d{3}|SGPT\d{2}) Build' + device_replacement: '$1' + brand_replacement: 'Sony' + model_replacement: '$1' + - regex: '; *(NW-Z1000Series) Build' + device_replacement: '$1' + brand_replacement: 'Sony' + model_replacement: '$1' + + ########## + # Sony PlayStation + # @ref: http://playstation.com + # The Vita spoofs the Kindle + ########## + - regex: 'PLAYSTATION 3' + device_replacement: 'PlayStation 3' + brand_replacement: 'Sony' + model_replacement: 'PlayStation 3' + - regex: '(PlayStation (?:Portable|Vita|\d+))' + device_replacement: '$1' + brand_replacement: 'Sony' + model_replacement: 
'$1' + + ######### + # Spice + # @ref: http://www.spicemobilephones.co.in/ + ######### + - regex: '; *((?:CSL_Spice|Spice|SPICE|CSL)[ _\-]?)?([Mm][Ii])([ _\-])?(\d{3}[^;/]*) Build/' + device_replacement: '$1$2$3$4' + brand_replacement: 'Spice' + model_replacement: 'Mi$4' + + ######### + # Sprint (Operator Branded Devices) + # @ref: + ######### + - regex: '; *(Sprint )(.+?) *(?:Build|[;/])' + device_replacement: '$1$2' + brand_replacement: 'Sprint' + model_replacement: '$2' + - regex: '\b(Sprint)[: ]([^;,/ ]+)' + device_replacement: '$1$2' + brand_replacement: 'Sprint' + model_replacement: '$2' + + ######### + # Tagi + # @ref: ?? + ######### + - regex: '; *(TAGI[ ]?)(MID) ?([^;/]+) Build/' + device_replacement: '$1$2$3' + brand_replacement: 'Tagi' + model_replacement: '$2$3' + + ######### + # Tecmobile + # @ref: http://www.tecmobile.com/ + ######### + - regex: '; *(Oyster500|Opal 800) Build' + device_replacement: 'Tecmobile $1' + brand_replacement: 'Tecmobile' + model_replacement: '$1' + + ######### + # Tecno + # @ref: www.tecno-mobile.com/‎ + ######### + - regex: '; *(TECNO[ _])([^;/]+) Build/' + device_replacement: '$1$2' + brand_replacement: 'Tecno' + model_replacement: '$2' + + ######### + # Telechips, Techvision evaluation boards + # @ref: + ######### + - regex: '; *Android for (Telechips|Techvision) ([^ ]+) ' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + + ######### + # Telstra + # @ref: http://www.telstra.com.au/home-phone/thub-2/ + # @ref: https://support.google.com/googleplay/answer/1727131?hl=en + ######### + - regex: '; *(T-Hub2) Build/' + device_replacement: '$1' + brand_replacement: 'Telstra' + model_replacement: '$1' + + ######### + # Terra + # @ref: http://www.wortmann.de/ + ######### + - regex: '; *(PAD) ?(100[12]) Build/' + device_replacement: 'Terra $1$2' + brand_replacement: 'Terra' + model_replacement: '$1$2' + + ######### + # Texet + # @ref: http://www.texet.ru/tablet/ + ######### + - regex: '; *(T[BM]-\d{3}[^;/]+) Build/' + device_replacement: '$1' + brand_replacement: 'Texet' + model_replacement: '$1' + + ######### + # Thalia + # @ref: http://www.thalia.de/shop/tolino-shine-ereader/show/ + ######### + - regex: '; *(tolino [^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Thalia' + model_replacement: '$1' + - regex: '; *Build/.* (TOLINO_BROWSER)' + device_replacement: '$1' + brand_replacement: 'Thalia' + model_replacement: 'Tolino Shine' + + ######### + # Thl + # @ref: http://en.thl.com.cn/Mobile + # @ref: http://thlmobilestore.com + ######### + - regex: '; *(?:CJ[ -])?(ThL|THL)[ -]([^;/]+) Build/' + device_replacement: '$1 $2' + brand_replacement: 'Thl' + model_replacement: '$2' + - regex: '; *(T100|T200|T5|W100|W200|W8s) Build/' + device_replacement: '$1' + brand_replacement: 'Thl' + model_replacement: '$1' + + ######### + # T-Mobile (Operator Branded Devices) + ######### + # @ref: https://en.wikipedia.org/wiki/HTC_Hero + - regex: '; *(T-Mobile[ _]G2[ _]Touch) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: 'Hero' + # @ref: https://en.wikipedia.org/wiki/HTC_Desire_Z + - regex: '; *(T-Mobile[ _]G2) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: 'Desire Z' + - regex: '; *(T-Mobile myTouch Q) Build' + device_replacement: '$1' + brand_replacement: 'Huawei' + model_replacement: 'U8730' + - regex: '; *(T-Mobile myTouch) Build' + device_replacement: '$1' + brand_replacement: 'Huawei' + model_replacement: 'U8680' + - regex: '; 
*(T-Mobile_Espresso) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: 'Espresso' + - regex: '; *(T-Mobile G1) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: 'Dream' + - regex: '\b(T-Mobile ?)?(myTouch)[ _]?([34]G)[ _]?([^\/]*) (?:Mozilla|Build)' + device_replacement: '$1$2 $3 $4' + brand_replacement: 'HTC' + model_replacement: '$2 $3 $4' + - regex: '\b(T-Mobile)_([^_]+)_(.*) Build' + device_replacement: '$1 $2 $3' + brand_replacement: 'Tmobile' + model_replacement: '$2 $3' + - regex: '\b(T-Mobile)[_ ]?(.*?)Build' + device_replacement: '$1 $2' + brand_replacement: 'Tmobile' + model_replacement: '$2' + + ######### + # Tomtec + # @ref: http://www.tom-tec.eu/pages/tablets.php + ######### + - regex: ' (ATP[0-9]{4}) Build' + device_replacement: '$1' + brand_replacement: 'Tomtec' + model_replacement: '$1' + + ######### + # Tooky + # @ref: http://www.tookymobile.com/ + ######### + - regex: ' *(TOOKY)[ _\-]([^;/]+) ?(?:Build|;)' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'Tooky' + model_replacement: '$2' + + ######### + # Toshiba + # @ref: http://www.toshiba.co.jp/ + # @missing: LT170, Thrive 7, TOSHIBA STB10 + ######### + - regex: '\b(TOSHIBA_AC_AND_AZ|TOSHIBA_FOLIO_AND_A|FOLIO_AND_A)' + device_replacement: '$1' + brand_replacement: 'Toshiba' + model_replacement: 'Folio 100' + - regex: '; *([Ff]olio ?100) Build/' + device_replacement: '$1' + brand_replacement: 'Toshiba' + model_replacement: 'Folio 100' + - regex: '; *(AT[0-9]{2,3}(?:\-A|LE\-A|PE\-A|SE|a)?|AT7-A|AT1S0|Hikari-iFrame/WDPF-[^;/]+|THRiVE|Thrive) Build/' + device_replacement: 'Toshiba $1' + brand_replacement: 'Toshiba' + model_replacement: '$1' + + ######### + # Touchmate + # @ref: http://touchmatepc.com/new/ + ######### + - regex: '; *(TM-MID\d+[^;/]+|TOUCHMATE|MID-750) Build' + device_replacement: '$1' + brand_replacement: 'Touchmate' + model_replacement: '$1' + # @todo: needs verification user-agents missing + - regex: '; *(TM-SM\d+[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Touchmate' + model_replacement: '$1' + + ######### + # Treq + # @ref: http://www.treq.co.id/product + ######### + - regex: '; *(A10 [Bb]asic2?) Build/' + device_replacement: '$1' + brand_replacement: 'Treq' + model_replacement: '$1' + - regex: '; *(TREQ[ _\-])([^;/]+) Build' + regex_flag: 'i' + device_replacement: '$1$2' + brand_replacement: 'Treq' + model_replacement: '$2' + + ######### + # Umeox + # @ref: http://umeox.com/ + # @models: A936|A603|X-5|X-3 + ######### + # @todo: guessed markers + - regex: '; *(X-?5|X-?3) Build/' + device_replacement: '$1' + brand_replacement: 'Umeox' + model_replacement: '$1' + # @todo: guessed markers + - regex: '; *(A502\+?|A936|A603|X1|X2) Build/' + device_replacement: '$1' + brand_replacement: 'Umeox' + model_replacement: '$1' + + ######### + # Versus + # @ref: http://versusuk.com/support.html + ######### + - regex: '(TOUCH(?:TAB|PAD).+?) 
Build/' + regex_flag: 'i' + device_replacement: 'Versus $1' + brand_replacement: 'Versus' + model_replacement: '$1' + + ######### + # Vertu + # @ref: http://www.vertu.com/ + ######### + - regex: '(VERTU) ([^;/]+) Build/' + device_replacement: '$1 $2' + brand_replacement: 'Vertu' + model_replacement: '$2' + + ######### + # Videocon + # @ref: http://www.videoconmobiles.com + ######### + - regex: '; *(Videocon)[ _\-]([^;/]+) *(?:Build|;)' + device_replacement: '$1 $2' + brand_replacement: 'Videocon' + model_replacement: '$2' + - regex: ' (VT\d{2}[A-Za-z]*) Build' + device_replacement: '$1' + brand_replacement: 'Videocon' + model_replacement: '$1' + + ######### + # Viewsonic + # @ref: http://viewsonic.com + ######### + - regex: '; *((?:ViewPad|ViewPhone|VSD)[^;/]+) Build/' + device_replacement: '$1' + brand_replacement: 'Viewsonic' + model_replacement: '$1' + - regex: '; *(ViewSonic-)([^;/]+) Build/' + device_replacement: '$1$2' + brand_replacement: 'Viewsonic' + model_replacement: '$2' + - regex: '; *(GTablet.*) Build/' + device_replacement: '$1' + brand_replacement: 'Viewsonic' + model_replacement: '$1' + + ######### + # vivo + # @ref: http://vivo.cn/ + ######### + - regex: '; *([Vv]ivo)[ _]([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'vivo' + model_replacement: '$2' + + ######### + # Vodafone (Operator Branded Devices) + # @ref: ?? + ######### + - regex: '(Vodafone) (.*) Build/' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + + ######### + # Walton + # @ref: http://www.waltonbd.com/ + ######### + - regex: '; *(?:Walton[ _\-])?(Primo[ _\-][^;/]+) Build' + regex_flag: 'i' + device_replacement: 'Walton $1' + brand_replacement: 'Walton' + model_replacement: '$1' + + ######### + # Wiko + # @ref: http://fr.wikomobile.com/collection.php?s=Smartphones + ######### + - regex: '; *(?:WIKO[ \-])?(CINK\+?|BARRY|BLOOM|DARKFULL|DARKMOON|DARKNIGHT|DARKSIDE|FIZZ|HIGHWAY|IGGY|OZZY|RAINBOW|STAIRWAY|SUBLIM|WAX|CINK [^;/]+) Build/' + regex_flag: 'i' + device_replacement: 'Wiko $1' + brand_replacement: 'Wiko' + model_replacement: '$1' + + ######### + # WellcoM + # @ref: ?? + ######### + - regex: '; *WellcoM-([^;/]+) Build' + device_replacement: 'Wellcom $1' + brand_replacement: 'Wellcom' + model_replacement: '$1' + + ########## + # WeTab + # @ref: http://wetab.mobi/ + ########## + - regex: '(?:(WeTab)-Browser|; (wetab) Build)' + device_replacement: '$1' + brand_replacement: 'WeTab' + model_replacement: 'WeTab' + + ######### + # Wolfgang + # @ref: http://wolfgangmobile.com/ + ######### + - regex: '; *(AT-AS[^;/]+) Build' + device_replacement: 'Wolfgang $1' + brand_replacement: 'Wolfgang' + model_replacement: '$1' + + ######### + # Woxter + # @ref: http://www.woxter.es/es-es/categories/index + ######### + - regex: '; *(?:Woxter|Wxt) ([^;/]+) Build' + device_replacement: 'Woxter $1' + brand_replacement: 'Woxter' + model_replacement: '$1' + + ######### + # Yarvik Zania + # @ref: http://yarvik.com + ######### + - regex: '; *(?:Xenta |Luna )?(TAB[234][0-9]{2}|TAB0[78]-\d{3}|TAB0?9-\d{3}|TAB1[03]-\d{3}|SMP\d{2}-\d{3}) Build/' + device_replacement: 'Yarvik $1' + brand_replacement: 'Yarvik' + model_replacement: '$1' + + ######### + # Yifang + # @note: Needs to be at the very last as manufacturer builds for other brands. 
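+ # @note: illustrative (hypothetical) example: a UA fragment '; XYM7000AD Build' would match the regex below with $1='XY' (an invented reseller prefix), $2='M7000AD', $3='',
+ # giving brand 'Yifang' and model 'M7000AD'; the broad 2-4 letter prefix capture is why this entry must come after all the named brands.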
+ # @ref: http://www.yifangdigital.com/ + # @models: M1010, M1011, M1007, M1008, M1005, M899, M899LP, M909, M8000, + # M8001, M8002, M8003, M849, M815, M816, M819, M805, M878, M780LPW, + # M778, M7000, M7000AD, M7000NBD, M7001, M7002, M7002KBD, M777, M767, + # M789, M799, M769, M757, M755, M753, M752, M739, M729, M723, M712, M727 + ######### + - regex: '; *([A-Z]{2,4})(M\d{3,}[A-Z]{2})([^;\)\/]*)(?: Build|[;\)])' + device_replacement: 'Yifang $1$2$3' + brand_replacement: 'Yifang' + model_replacement: '$2' + + ######### + # XiaoMi + # @ref: http://www.xiaomi.com/event/buyphone + ######### + - regex: '; *((MI|HM|MI-ONE|Redmi)[ -](NOTE |Note )?[^;/]*) (Build|MIUI)/' + device_replacement: 'XiaoMi $1' + brand_replacement: 'XiaoMi' + model_replacement: '$1' + + ######### + # Xolo + # @ref: http://www.xolo.in/ + ######### + - regex: '; *XOLO[ _]([^;/]*tab.*) Build' + regex_flag: 'i' + device_replacement: 'Xolo $1' + brand_replacement: 'Xolo' + model_replacement: '$1' + - regex: '; *XOLO[ _]([^;/]+) Build' + regex_flag: 'i' + device_replacement: 'Xolo $1' + brand_replacement: 'Xolo' + model_replacement: '$1' + - regex: '; *(q\d0{2,3}[a-z]?) Build' + regex_flag: 'i' + device_replacement: 'Xolo $1' + brand_replacement: 'Xolo' + model_replacement: '$1' + + ######### + # Xoro + # @ref: http://www.xoro.de/produkte/ + ######### + - regex: '; *(PAD ?[79]\d+[^;/]*|TelePAD\d+[^;/]) Build' + device_replacement: 'Xoro $1' + brand_replacement: 'Xoro' + model_replacement: '$1' + + ######### + # Zopo + # @ref: http://www.zopomobiles.com/products.html + ######### + - regex: '; *(?:(?:ZOPO|Zopo)[ _]([^;/]+)|(ZP ?(?:\d{2}[^;/]+|C2))|(C[2379])) Build' + device_replacement: '$1$2$3' + brand_replacement: 'Zopo' + model_replacement: '$1$2$3' + + ######### + # ZiiLabs + # @ref: http://www.ziilabs.com/products/platforms/androidreferencetablets.php + ######### + - regex: '; *(ZiiLABS) (Zii[^;/]*) Build' + device_replacement: '$1 $2' + brand_replacement: 'ZiiLabs' + model_replacement: '$2' + - regex: '; *(Zii)_([^;/]*) Build' + device_replacement: '$1 $2' + brand_replacement: 'ZiiLabs' + model_replacement: '$2' + + ######### + # ZTE + # @ref: http://www.ztedevices.com/ + ######### + - regex: '; *(ARIZONA|(?:ATLAS|Atlas) W|D930|Grand (?:[SX][^;]*|Era|Memo[^;]*)|JOE|(?:Kis|KIS)\b[^;]*|Libra|Light [^;]*|N8[056][01]|N850L|N8000|N9[15]\d{2}|N9810|NX501|Optik|(?:Vip )Racer[^;]*|RacerII|RACERII|San Francisco[^;]*|V9[AC]|V55|V881|Z[679][0-9]{2}[A-z]?) 
Build' + device_replacement: '$1' + brand_replacement: 'ZTE' + model_replacement: '$1' + - regex: '; *([A-Z]\d+)_USA_[^;]* Build' + device_replacement: '$1' + brand_replacement: 'ZTE' + model_replacement: '$1' + - regex: '; *(SmartTab\d+)[^;]* Build' + device_replacement: '$1' + brand_replacement: 'ZTE' + model_replacement: '$1' + - regex: '; *(?:Blade|BLADE|ZTE-BLADE)([^;/]*) Build' + device_replacement: 'ZTE Blade$1' + brand_replacement: 'ZTE' + model_replacement: 'Blade$1' + - regex: '; *(?:Skate|SKATE|ZTE-SKATE)([^;/]*) Build' + device_replacement: 'ZTE Skate$1' + brand_replacement: 'ZTE' + model_replacement: 'Skate$1' + - regex: '; *(Orange |Optimus )(Monte Carlo|San Francisco) Build' + device_replacement: '$1$2' + brand_replacement: 'ZTE' + model_replacement: '$1$2' + - regex: '; *(?:ZXY-ZTE_|ZTE\-U |ZTE[\- _]|ZTE-C[_ ])([^;/]+) Build' + device_replacement: 'ZTE $1' + brand_replacement: 'ZTE' + model_replacement: '$1' + # operator specific + - regex: '; (BASE) (lutea|Lutea 2|Tab[^;]*) Build' + device_replacement: '$1 $2' + brand_replacement: 'ZTE' + model_replacement: '$1 $2' + - regex: '; (Avea inTouch 2|soft stone|tmn smart a7|Movistar[ _]Link) Build' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'ZTE' + model_replacement: '$1' + - regex: '; *(vp9plus)\)' + device_replacement: '$1' + brand_replacement: 'ZTE' + model_replacement: '$1' + + ########## + # Zync + # @ref: http://www.zync.in/index.php/our-products/tablet-phablets + ########## + - regex: '; ?(Cloud[ _]Z5|z1000|Z99 2G|z99|z930|z999|z990|z909|Z919|z900) Build/' + device_replacement: '$1' + brand_replacement: 'Zync' + model_replacement: '$1' + + ########## + # Kindle + # @note: Needs to be after Sony Playstation Vita as this UA contains Silk/3.2 + # @ref: https://developer.amazon.com/sdk/fire/specifications.html + # @ref: http://amazonsilk.wordpress.com/useful-bits/silk-user-agent/ + ########## + - regex: '; ?(KFOT|Kindle Fire) Build\b' + device_replacement: 'Kindle Fire' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire' + - regex: '; ?(KFOTE|Amazon Kindle Fire2) Build\b' + device_replacement: 'Kindle Fire 2' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire 2' + - regex: '; ?(KFTT) Build\b' + device_replacement: 'Kindle Fire HD' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire HD 7"' + - regex: '; ?(KFJWI) Build\b' + device_replacement: 'Kindle Fire HD 8.9" WiFi' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire HD 8.9" WiFi' + - regex: '; ?(KFJWA) Build\b' + device_replacement: 'Kindle Fire HD 8.9" 4G' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire HD 8.9" 4G' + - regex: '; ?(KFSOWI) Build\b' + device_replacement: 'Kindle Fire HD 7" WiFi' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire HD 7" WiFi' + - regex: '; ?(KFTHWI) Build\b' + device_replacement: 'Kindle Fire HDX 7" WiFi' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire HDX 7" WiFi' + - regex: '; ?(KFTHWA) Build\b' + device_replacement: 'Kindle Fire HDX 7" 4G' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire HDX 7" 4G' + - regex: '; ?(KFAPWI) Build\b' + device_replacement: 'Kindle Fire HDX 8.9" WiFi' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire HDX 8.9" WiFi' + - regex: '; ?(KFAPWA) Build\b' + device_replacement: 'Kindle Fire HDX 8.9" 4G' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire HDX 8.9" 4G' + - regex: '; ?Amazon ([^;/]+) Build\b' + device_replacement: '$1' + brand_replacement: 'Amazon' + 
model_replacement: '$1' + - regex: '; ?(Kindle) Build\b' + device_replacement: 'Kindle' + brand_replacement: 'Amazon' + model_replacement: 'Kindle' + - regex: '; ?(Silk)/(\d+)\.(\d+)(?:\.([0-9\-]+))? Build\b' + device_replacement: 'Kindle Fire' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire$2' + - regex: ' (Kindle)/(\d+\.\d+)' + device_replacement: 'Kindle' + brand_replacement: 'Amazon' + model_replacement: '$1 $2' + - regex: ' (Silk|Kindle)/(\d+)\.' + device_replacement: 'Kindle' + brand_replacement: 'Amazon' + model_replacement: 'Kindle' + + ######### + # Devices from Chinese manufacturer(s) + # @note: identified by x-wap-profile http://218.249.47.94/Xianghe/.* + ######### + - regex: '(sprd)\-([^/]+)/' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + # @ref: http://eshinechina.en.alibaba.com/ + - regex: '; *(H\d{2}00\+?) Build' + device_replacement: '$1' + brand_replacement: 'Hero' + model_replacement: '$1' + - regex: '; *(iphone|iPhone5) Build/' + device_replacement: 'Xianghe $1' + brand_replacement: 'Xianghe' + model_replacement: '$1' + - regex: '; *(e\d{4}[a-z]?_?v\d+|v89_[^;/]+)[^;/]+ Build/' + device_replacement: 'Xianghe $1' + brand_replacement: 'Xianghe' + model_replacement: '$1' + + ######### + # Cellular + # @ref: + # @note: Operator branded devices + ######### + - regex: '\bUSCC[_\-]?([^ ;/\)]+)' + device_replacement: '$1' + brand_replacement: 'Cellular' + model_replacement: '$1' + + ###################################################################### + # Windows Phone Parsers + ###################################################################### + + ######### + # Alcatel Windows Phones + ######### + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?)?(?:ALCATEL)[^;]*; *([^;,\)]+)' + device_replacement: 'Alcatel $1' + brand_replacement: 'Alcatel' + model_replacement: '$1' + + ######### + # Asus Windows Phones + ######### + #~ - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?|WpsLondonTest; ?)?(?:ASUS|Asus)[^;]*; *([^;,\)]+)' + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?|WpsLondonTest; ?)?(?:ASUS|Asus)[^;]*; *([^;,\)]+)' + device_replacement: 'Asus $1' + brand_replacement: 'Asus' + model_replacement: '$1' + + ######### + # Dell Windows Phones + ######### + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?)?(?:DELL|Dell)[^;]*; *([^;,\)]+)' + device_replacement: 'Dell $1' + brand_replacement: 'Dell' + model_replacement: '$1' + + ######### + # HTC Windows Phones + ######### + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?|WpsLondonTest; ?)?(?:HTC|Htc|HTC_blocked[^;]*)[^;]*; *(?:HTC)?([^;,\)]+)' + device_replacement: 'HTC $1' + brand_replacement: 'HTC' + model_replacement: '$1' + + ######### + # Huawei Windows Phones + ######### + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?)?(?:HUAWEI)[^;]*; *(?:HUAWEI )?([^;,\)]+)' + device_replacement: 'Huawei $1' + brand_replacement: 'Huawei' + model_replacement: '$1' + + ######### + # LG Windows Phones + ######### + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?)?(?:LG|Lg)[^;]*; *(?:LG[ \-])?([^;,\)]+)' + device_replacement: 'LG $1' + brand_replacement: 'LG' + model_replacement: '$1' + + ######### + # Nokia Windows Phones + ######### + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?)?(?:NOKIA|Nokia)[^;]*; 
*(?:NOKIA ?|Nokia ?|LUMIA ?|[Ll]umia ?)*(\d{3,}[^;\)]*)' + device_replacement: 'Lumia $1' + brand_replacement: 'Nokia' + model_replacement: 'Lumia $1' + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?)?(?:NOKIA|Nokia)[^;]*; *(RM-\d{3,})' + device_replacement: 'Nokia $1' + brand_replacement: 'Nokia' + model_replacement: '$1' + - regex: '(?:Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)]|WPDesktop;) ?(?:ARM; ?Touch; ?|Touch; ?)?(?:NOKIA|Nokia)[^;]*; *(?:NOKIA ?|Nokia ?|LUMIA ?|[Ll]umia ?)*([^;\)]+)' + device_replacement: 'Nokia $1' + brand_replacement: 'Nokia' + model_replacement: '$1' + + ######### + # Microsoft Windows Phones + ######### + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?)?(?:Microsoft(?: Corporation)?)[^;]*; *([^;,\)]+)' + device_replacement: 'Microsoft $1' + brand_replacement: 'Microsoft' + model_replacement: '$1' + + ######### + # Samsung Windows Phones + ######### + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?|WpsLondonTest; ?)?(?:SAMSUNG)[^;]*; *(?:SAMSUNG )?([^;,\.\)]+)' + device_replacement: 'Samsung $1' + brand_replacement: 'Samsung' + model_replacement: '$1' + + ######### + # Toshiba Windows Phones + ######### + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?|WpsLondonTest; ?)?(?:TOSHIBA|FujitsuToshibaMobileCommun)[^;]*; *([^;,\)]+)' + device_replacement: 'Toshiba $1' + brand_replacement: 'Toshiba' + model_replacement: '$1' + + ######### + # Generic Windows Phones + ######### + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?|WpsLondonTest; ?)?([^;]+); *([^;,\)]+)' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + + ###################################################################### + # Other Devices Parser + ###################################################################### + + ######### + # Samsung Bada Phones + ######### + - regex: '(?:^|; )SAMSUNG\-([A-Za-z0-9\-]+).* Bada/' + device_replacement: 'Samsung $1' + brand_replacement: 'Samsung' + model_replacement: '$1' + + ######### + # Firefox OS + ######### + - regex: '\(Mobile; ALCATEL ?(One|ONE) ?(Touch|TOUCH) ?([^;/]+)(?:/[^;]+)?; rv:[^\)]+\) Gecko/[^\/]+ Firefox/' + device_replacement: 'Alcatel $1 $2 $3' + brand_replacement: 'Alcatel' + model_replacement: 'One Touch $3' + - regex: '\(Mobile; (?:ZTE([^;]+)|(OpenC)); rv:[^\)]+\) Gecko/[^\/]+ Firefox/' + device_replacement: 'ZTE $1$2' + brand_replacement: 'ZTE' + model_replacement: '$1$2' + + ########## + # NOKIA + # @note: NokiaN8-00 comes before iphone. 
Sometimes spoofs iphone + ########## + - regex: 'Nokia(N[0-9]+)([A-z_\-][A-z0-9_\-]*)' + device_replacement: 'Nokia $1' + brand_replacement: 'Nokia' + model_replacement: '$1$2' + - regex: '(?:NOKIA|Nokia)(?:\-| *)(?:([A-Za-z0-9]+)\-[0-9a-f]{32}|([A-Za-z0-9\-]+)(?:UCBrowser)|([A-Za-z0-9\-]+))' + device_replacement: 'Nokia $1$2$3' + brand_replacement: 'Nokia' + model_replacement: '$1$2$3' + - regex: 'Lumia ([A-Za-z0-9\-]+)' + device_replacement: 'Lumia $1' + brand_replacement: 'Nokia' + model_replacement: 'Lumia $1' + # UCWEB Browser on Symbian + - regex: '\(Symbian; U; S60 V5; [A-z]{2}\-[A-z]{2}; (SonyEricsson|Samsung|Nokia|LG)([^;/]+)\)' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + # Nokia Symbian + - regex: '\(Symbian(?:/3)?; U; ([^;]+);' + device_replacement: 'Nokia $1' + brand_replacement: 'Nokia' + model_replacement: '$1' + + ########## + # BlackBerry + # @ref: http://www.useragentstring.com/pages/BlackBerry/ + ########## + - regex: 'BB10; ([A-Za-z0-9\- ]+)\)' + device_replacement: 'BlackBerry $1' + brand_replacement: 'BlackBerry' + model_replacement: '$1' + - regex: 'Play[Bb]ook.+RIM Tablet OS' + device_replacement: 'BlackBerry Playbook' + brand_replacement: 'BlackBerry' + model_replacement: 'Playbook' + - regex: 'Black[Bb]erry ([0-9]+);' + device_replacement: 'BlackBerry $1' + brand_replacement: 'BlackBerry' + model_replacement: '$1' + - regex: 'Black[Bb]erry([0-9]+)' + device_replacement: 'BlackBerry $1' + brand_replacement: 'BlackBerry' + model_replacement: '$1' + - regex: 'Black[Bb]erry;' + device_replacement: 'BlackBerry' + brand_replacement: 'BlackBerry' + + ########## + # PALM / HP + # @note: some palm devices must come before iphone. sometimes spoofs iphone in ua + ########## + - regex: '(Pre|Pixi)/\d+\.\d+' + device_replacement: 'Palm $1' + brand_replacement: 'Palm' + model_replacement: '$1' + - regex: 'Palm([0-9]+)' + device_replacement: 'Palm $1' + brand_replacement: 'Palm' + model_replacement: '$1' + - regex: 'Treo([A-Za-z0-9]+)' + device_replacement: 'Palm Treo $1' + brand_replacement: 'Palm' + model_replacement: 'Treo $1' + - regex: 'webOS.*(P160U(?:NA)?)/(\d+).(\d+)' + device_replacement: 'HP Veer' + brand_replacement: 'HP' + model_replacement: 'Veer' + - regex: '(Touch[Pp]ad)/\d+\.\d+' + device_replacement: 'HP TouchPad' + brand_replacement: 'HP' + model_replacement: 'TouchPad' + - regex: 'HPiPAQ([A-Za-z0-9]+)/\d+.\d+' + device_replacement: 'HP iPAQ $1' + brand_replacement: 'HP' + model_replacement: 'iPAQ $1' + - regex: 'PDA; (PalmOS)/sony/model ([a-z]+)/Revision' + device_replacement: '$1' + brand_replacement: 'Sony' + model_replacement: '$1 $2' + + ########## + # AppleTV + # No built in browser that I can tell + # Stack Overflow indicated iTunes-AppleTV/4.1 as a known UA for app available and I'm seeing it in live traffic + ########## + - regex: '(Apple\s?TV)' + device_replacement: 'AppleTV' + brand_replacement: 'Apple' + model_replacement: 'AppleTV' + + ######### + # Tesla Model S + ######### + - regex: '(QtCarBrowser)' + device_replacement: 'Tesla Model S' + brand_replacement: 'Tesla' + model_replacement: 'Model S' + + ########## + # iSTUFF + # @note: complete but probably catches spoofs + # ipad and ipod must be parsed before iphone + # cannot determine specific device type from ua string. (3g, 3gs, 4, etc) + ########## + # @note: on some ua the device can be identified e.g. 
iPhone5,1 + - regex: '((?:iPhone|iPad|iPod)\d+,\d+)' + device_replacement: '$1' + brand_replacement: 'Apple' + model_replacement: '$1' + # @note: iPad needs to be before iPhone + - regex: '(iPad)(?:;| Simulator;)' + device_replacement: '$1' + brand_replacement: 'Apple' + model_replacement: '$1' + - regex: '(iPod)(?:;| touch;| Simulator;)' + device_replacement: '$1' + brand_replacement: 'Apple' + model_replacement: '$1' + - regex: '(iPhone)(?:;| Simulator;)' + device_replacement: '$1' + brand_replacement: 'Apple' + model_replacement: '$1' + # @note: desktop applications show device info + - regex: 'CFNetwork/.* Darwin/\d.*\(((?:Mac|iMac|PowerMac|PowerBook)[^\d]*)(\d+)(?:,|%2C)(\d+)' + device_replacement: '$1$2,$3' + brand_replacement: 'Apple' + model_replacement: '$1$2,$3' + # @note: iOS applications do not show device info + - regex: 'CFNetwork/.* Darwin/\d' + device_replacement: 'iOS-Device' + brand_replacement: 'Apple' + model_replacement: 'iOS-Device' + + ########## + # Acer + ########## + - regex: 'acer_([A-Za-z0-9]+)_' + device_replacement: 'Acer $1' + brand_replacement: 'Acer' + model_replacement: '$1' + + ########## + # Alcatel + ########## + - regex: '(?:ALCATEL|Alcatel)-([A-Za-z0-9\-]+)' + device_replacement: 'Alcatel $1' + brand_replacement: 'Alcatel' + model_replacement: '$1' + + ########## + # Amoi + ########## + - regex: '(?:Amoi|AMOI)\-([A-Za-z0-9]+)' + device_replacement: 'Amoi $1' + brand_replacement: 'Amoi' + model_replacement: '$1' + + ########## + # Asus + ########## + - regex: '(?:; |\/|^)((?:Transformer (?:Pad|Prime) |Transformer |PadFone[ _]?)[A-Za-z0-9]*)' + device_replacement: 'Asus $1' + brand_replacement: 'Asus' + model_replacement: '$1' + - regex: '(?:asus.*?ASUS|Asus|ASUS|asus)[\- ;]*((?:Transformer (?:Pad|Prime) |Transformer |Padfone |Nexus[ _])?[A-Za-z0-9]+)' + device_replacement: 'Asus $1' + brand_replacement: 'Asus' + model_replacement: '$1' + + + ########## + # Bird + ########## + - regex: '\bBIRD[ \-\.]([A-Za-z0-9]+)' + device_replacement: 'Bird $1' + brand_replacement: 'Bird' + model_replacement: '$1' + + ########## + # Dell + ########## + - regex: '\bDell ([A-Za-z0-9]+)' + device_replacement: 'Dell $1' + brand_replacement: 'Dell' + model_replacement: '$1' + + ########## + # DoCoMo + ########## + - regex: 'DoCoMo/2\.0 ([A-Za-z0-9]+)' + device_replacement: 'DoCoMo $1' + brand_replacement: 'DoCoMo' + model_replacement: '$1' + - regex: '([A-Za-z0-9]+)_W;FOMA' + device_replacement: 'DoCoMo $1' + brand_replacement: 'DoCoMo' + model_replacement: '$1' + - regex: '([A-Za-z0-9]+);FOMA' + device_replacement: 'DoCoMo $1' + brand_replacement: 'DoCoMo' + model_replacement: '$1' + + ########## + # htc + ########## + - regex: '\b(?:HTC/|HTC/[a-z0-9]+/)?HTC[ _\-;]? 
*(.*?)(?:-?Mozilla|fingerPrint|[;/\(\)]|$)' + device_replacement: 'HTC $1' + brand_replacement: 'HTC' + model_replacement: '$1' + + ########## + # Huawei + ########## + - regex: 'Huawei([A-Za-z0-9]+)' + device_replacement: 'Huawei $1' + brand_replacement: 'Huawei' + model_replacement: '$1' + - regex: 'HUAWEI-([A-Za-z0-9]+)' + device_replacement: 'Huawei $1' + brand_replacement: 'Huawei' + model_replacement: '$1' + - regex: 'vodafone([A-Za-z0-9]+)' + device_replacement: 'Huawei Vodafone $1' + brand_replacement: 'Huawei' + model_replacement: 'Vodafone $1' + + ########## + # i-mate + ########## + - regex: 'i\-mate ([A-Za-z0-9]+)' + device_replacement: 'i-mate $1' + brand_replacement: 'i-mate' + model_replacement: '$1' + + ########## + # kyocera + ########## + - regex: 'Kyocera\-([A-Za-z0-9]+)' + device_replacement: 'Kyocera $1' + brand_replacement: 'Kyocera' + model_replacement: '$1' + - regex: 'KWC\-([A-Za-z0-9]+)' + device_replacement: 'Kyocera $1' + brand_replacement: 'Kyocera' + model_replacement: '$1' + + ########## + # lenovo + ########## + - regex: 'Lenovo[_\-]([A-Za-z0-9]+)' + device_replacement: 'Lenovo $1' + brand_replacement: 'Lenovo' + model_replacement: '$1' + + ########## + # HbbTV (European and Australian standard) + # written before the LG regexes, as LG is making HbbTV too + ########## + - regex: '(HbbTV)/[0-9]+\.[0-9]+\.[0-9]+ \([^;]*; *(LG)E *; *([^;]*) *;[^;]*;[^;]*;\)' + device_replacement: '$1' + brand_replacement: '$2' + model_replacement: '$3' + - regex: '(HbbTV)/1\.1\.1.*CE-HTML/1\.\d;(Vendor/)*(THOM[^;]*?)[;\s](?:.*SW-Version/.*)*(LF[^;]+);?' + device_replacement: '$1' + brand_replacement: 'Thomson' + model_replacement: '$4' + - regex: '(HbbTV)(?:/1\.1\.1)?(?: ?\(;;;;;\))?; *CE-HTML(?:/1\.\d)?; *([^ ]+) ([^;]+);' + device_replacement: '$1' + brand_replacement: '$2' + model_replacement: '$3' + - regex: '(HbbTV)/1\.1\.1 \(;;;;;\) Maple_2011' + device_replacement: '$1' + brand_replacement: 'Samsung' + - regex: '(HbbTV)/[0-9]+\.[0-9]+\.[0-9]+ \([^;]*; *(?:CUS:([^;]*)|([^;]+)) *; *([^;]*) *;.*;' + device_replacement: '$1' + brand_replacement: '$2$3' + model_replacement: '$4' + - regex: '(HbbTV)/[0-9]+\.[0-9]+\.[0-9]+' + device_replacement: '$1' + + ########## + # LGE NetCast TV + ########## + - regex: 'LGE; (?:Media\/)?([^;]*);[^;]*;[^;]*;?\); "?LG NetCast(\.TV|\.Media|)-\d+' + device_replacement: 'NetCast$2' + brand_replacement: 'LG' + model_replacement: '$1' + + ########## + # InettvBrowser + ########## + - regex: 'InettvBrowser/[0-9]+\.[0-9A-Z]+ \([^;]*;(Sony)([^;]*);[^;]*;[^\)]*\)' + device_replacement: 'Inettv' + brand_replacement: '$1' + model_replacement: '$2' + - regex: 'InettvBrowser/[0-9]+\.[0-9A-Z]+ \([^;]*;([^;]*);[^;]*;[^\)]*\)' + device_replacement: 'Inettv' + brand_replacement: 'Generic_Inettv' + model_replacement: '$1' + - regex: '(?:InettvBrowser|TSBNetTV|NETTV|HBBTV)' + device_replacement: 'Inettv' + brand_replacement: 'Generic_Inettv' + + ########## + # lg + ########## + # LG Symbian Phones + - regex: 'Series60/\d\.\d (LG)[\-]?([A-Za-z0-9 \-]+)' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + # other LG phones + - regex: '\b(?:LGE[ \-]LG\-(?:AX)?|LGE |LGE?-LG|LGE?[ \-]|LG[ /\-]|lg[\-])([A-Za-z0-9]+)\b' + device_replacement: 'LG $1' + brand_replacement: 'LG' + model_replacement: '$1' + - regex: '(?:^LG[\-]?|^LGE[\-/]?)([A-Za-z]+[0-9]+[A-Za-z]*)' + device_replacement: 'LG $1' + brand_replacement: 'LG' + model_replacement: '$1' + - regex: '^LG([0-9]+[A-Za-z]*)' + device_replacement: 'LG $1' + brand_replacement: 'LG' + 
model_replacement: '$1' + + ########## + # microsoft + ########## + - regex: '(KIN\.[^ ]+) (\d+)\.(\d+)' + device_replacement: 'Microsoft $1' + brand_replacement: 'Microsoft' + model_replacement: '$1' + - regex: '(?:MSIE|XBMC).*\b(Xbox)\b' + device_replacement: '$1' + brand_replacement: 'Microsoft' + model_replacement: '$1' + - regex: '; ARM; Trident/6\.0; Touch[\);]' + device_replacement: 'Microsoft Surface RT' + brand_replacement: 'Microsoft' + model_replacement: 'Surface RT' + + ########## + # motorola + ########## + - regex: 'Motorola\-([A-Za-z0-9]+)' + device_replacement: 'Motorola $1' + brand_replacement: 'Motorola' + model_replacement: '$1' + - regex: 'MOTO\-([A-Za-z0-9]+)' + device_replacement: 'Motorola $1' + brand_replacement: 'Motorola' + model_replacement: '$1' + - regex: 'MOT\-([A-z0-9][A-z0-9\-]*)' + device_replacement: 'Motorola $1' + brand_replacement: 'Motorola' + model_replacement: '$1' + + ########## + # nintendo + ########## + - regex: 'Nintendo WiiU' + device_replacement: 'Nintendo Wii U' + brand_replacement: 'Nintendo' + model_replacement: 'Wii U' + - regex: 'Nintendo (DS|3DS|DSi|Wii);' + device_replacement: 'Nintendo $1' + brand_replacement: 'Nintendo' + model_replacement: '$1' + + ########## + # pantech + ########## + - regex: '(?:Pantech|PANTECH)[ _-]?([A-Za-z0-9\-]+)' + device_replacement: 'Pantech $1' + brand_replacement: 'Pantech' + model_replacement: '$1' + + ########## + # philips + ########## + - regex: 'Philips([A-Za-z0-9]+)' + device_replacement: 'Philips $1' + brand_replacement: 'Philips' + model_replacement: '$1' + - regex: 'Philips ([A-Za-z0-9]+)' + device_replacement: 'Philips $1' + brand_replacement: 'Philips' + model_replacement: '$1' + + ########## + # Samsung + ########## + # Samsung Symbian Devices + - regex: 'SymbianOS/9\.\d.* Samsung[/\-]([A-Za-z0-9 \-]+)' + device_replacement: 'Samsung $1' + brand_replacement: 'Samsung' + model_replacement: '$1' + - regex: '(Samsung)(SGH)(i[0-9]+)' + device_replacement: '$1 $2$3' + brand_replacement: '$1' + model_replacement: '$2-$3' + - regex: 'SAMSUNG-ANDROID-MMS/([^;/]+)' + device_replacement: '$1' + brand_replacement: 'Samsung' + model_replacement: '$1' + # Other Samsung + #- regex: 'SAMSUNG(?:; |-)([A-Za-z0-9\-]+)' + - regex: 'SAMSUNG(?:; |[ -/])([A-Za-z0-9\-]+)' + regex_flag: 'i' + device_replacement: 'Samsung $1' + brand_replacement: 'Samsung' + model_replacement: '$1' + + ########## + # Sega + ########## + - regex: '(Dreamcast)' + device_replacement: 'Sega $1' + brand_replacement: 'Sega' + model_replacement: '$1' + + ########## + # Siemens mobile + ########## + - regex: '^SIE-([A-Za-z0-9]+)' + device_replacement: 'Siemens $1' + brand_replacement: 'Siemens' + model_replacement: '$1' + + ########## + # Softbank + ########## + - regex: 'Softbank/[12]\.0/([A-Za-z0-9]+)' + device_replacement: 'Softbank $1' + brand_replacement: 'Softbank' + model_replacement: '$1' + + ########## + # SonyEricsson + ########## + - regex: 'SonyEricsson ?([A-Za-z0-9\-]+)' + device_replacement: 'Ericsson $1' + brand_replacement: 'SonyEricsson' + model_replacement: '$1' + + ########## + # Sony + ########## + - regex: 'Android [^;]+; ([^ ]+) (Sony)/' + device_replacement: '$2 $1' + brand_replacement: '$2' + model_replacement: '$1' + - regex: '(Sony)(?:BDP\/|\/)?([^ /;\)]+)[ /;\)]' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + + ######### + # Puffin Browser Device detect + # A=Android, I=iOS, P=Phone, T=Tablet + # AT=Android+Tablet + ######### + - regex: 'Puffin/[\d\.]+IT' + device_replacement: 
'iPad' + brand_replacement: 'Apple' + model_replacement: 'iPad' + - regex: 'Puffin/[\d\.]+IP' + device_replacement: 'iPhone' + brand_replacement: 'Apple' + model_replacement: 'iPhone' + - regex: 'Puffin/[\d\.]+AT' + device_replacement: 'Generic Tablet' + brand_replacement: 'Generic' + model_replacement: 'Tablet' + - regex: 'Puffin/[\d\.]+AP' + device_replacement: 'Generic Smartphone' + brand_replacement: 'Generic' + model_replacement: 'Smartphone' + + ######### + # Android General Device Matching (far from perfect) + ######### + - regex: 'Android[\- ][\d]+\.[\d]+; [A-Za-z]{2}\-[A-Za-z]{0,2}; WOWMobile (.+) Build' + brand_replacement: 'Generic_Android' + model_replacement: '$1' + - regex: 'Android[\- ][\d]+\.[\d]+\-update1; [A-Za-z]{2}\-[A-Za-z]{0,2} *; *(.+?) Build' + brand_replacement: 'Generic_Android' + model_replacement: '$1' + - regex: 'Android[\- ][\d]+(?:\.[\d]+){1,2}; *[A-Za-z]{2}[_\-][A-Za-z]{0,2}\-? *; *(.+?) Build' + brand_replacement: 'Generic_Android' + model_replacement: '$1' + - regex: 'Android[\- ][\d]+(?:\.[\d]+){1,2}; *[A-Za-z]{0,2}\- *; *(.+?) Build' + brand_replacement: 'Generic_Android' + model_replacement: '$1' + # No build info at all - "Build" follows locale immediately + - regex: 'Android[\- ][\d]+(?:\.[\d]+){1,2}; *[a-z]{0,2}[_\-]?[A-Za-z]{0,2};? Build' + device_replacement: 'Generic Smartphone' + brand_replacement: 'Generic' + model_replacement: 'Smartphone' + - regex: 'Android[\- ][\d]+(?:\.[\d]+){1,2}; *\-?[A-Za-z]{2}; *(.+?) Build' + brand_replacement: 'Generic_Android' + model_replacement: '$1' + - regex: 'Android[\- ][\d]+(?:\.[\d]+){1,2}(?:;.*)?; *(.+?) Build' + brand_replacement: 'Generic_Android' + model_replacement: '$1' + + ########## + # Google TV + ########## + - regex: '(GoogleTV)' + brand_replacement: 'Generic_Inettv' + model_replacement: '$1' + + ########## + # WebTV + ########## + - regex: '(WebTV)/\d+.\d+' + brand_replacement: 'Generic_Inettv' + model_replacement: '$1' + # Roku Digital-Video-Players https://www.roku.com/ + - regex: '^(Roku)/DVP-\d+\.\d+' + brand_replacement: 'Generic_Inettv' + model_replacement: '$1' + + ########## + # Generic Tablet + ########## + - regex: '(Android 3\.\d|Opera Tablet|Tablet; .+Firefox/|Android.*(?:Tab|Pad))' + regex_flag: 'i' + device_replacement: 'Generic Tablet' + brand_replacement: 'Generic' + model_replacement: 'Tablet' + + ########## + # Generic Smart Phone + ########## + - regex: '(Symbian|\bS60(Version|V\d)|\bS60\b|\((Series 60|Windows Mobile|Palm OS|Bada); Opera Mini|Windows CE|Opera Mobi|BREW|Brew|Mobile; .+Firefox/|iPhone OS|Android|MobileSafari|Windows *Phone|\(webOS/|PalmOS)' + device_replacement: 'Generic Smartphone' + brand_replacement: 'Generic' + model_replacement: 'Smartphone' + - regex: '(hiptop|avantgo|plucker|xiino|blazer|elaine)' + regex_flag: 'i' + device_replacement: 'Generic Smartphone' + brand_replacement: 'Generic' + model_replacement: 'Smartphone' + + ########## + # Spiders (this is hack...) 
+ ########## + - regex: '(bot|zao|borg|DBot|oegp|silk|Xenu|zeal|^NING|CCBot|crawl|htdig|lycos|slurp|teoma|voila|yahoo|Sogou|CiBra|Nutch|^Java/|^JNLP/|Daumoa|Genieo|ichiro|larbin|pompos|Scrapy|snappy|speedy|spider|msnbot|msrbot|vortex|^vortex|crawler|favicon|indexer|Riddler|scooter|scraper|scrubby|WhatWeb|WinHTTP|bingbot|openbot|gigabot|furlbot|polybot|seekbot|^voyager|archiver|Icarus6j|mogimogi|Netvibes|blitzbot|altavista|charlotte|findlinks|Retreiver|TLSProber|WordPress|SeznamBot|ProoXiBot|wsr\-agent|Squrl Java|EtaoSpider|PaperLiBot|SputnikBot|A6\-Indexer|netresearch|searchsight|baiduspider|YisouSpider|ICC\-Crawler|http%20client|Python-urllib|dataparksearch|converacrawler|Screaming Frog|AppEngine-Google|YahooCacheSystem|fast\-webcrawler|Sogou Pic Spider|semanticdiscovery|Innovazion Crawler|facebookexternalhit|Google.*/\+/web/snippet|Google-HTTP-Java-Client|BlogBridge|IlTrovatore-Setaccio|InternetArchive|GomezAgent|WebThumbnail|heritrix|NewsGator|PagePeeker|Reaper|ZooShot|holmes)' + regex_flag: 'i' + device_replacement: 'Spider' + brand_replacement: 'Spider' + model_replacement: 'Desktop' + + ########## + # Generic Feature Phone + # take care to do case insensitive matching + ########## + - regex: '^(1207|3gso|4thp|501i|502i|503i|504i|505i|506i|6310|6590|770s|802s|a wa|acer|acs\-|airn|alav|asus|attw|au\-m|aur |aus |abac|acoo|aiko|alco|alca|amoi|anex|anny|anyw|aptu|arch|argo|bmobile|bell|bird|bw\-n|bw\-u|beck|benq|bilb|blac|c55/|cdm\-|chtm|capi|comp|cond|dall|dbte|dc\-s|dica|ds\-d|ds12|dait|devi|dmob|doco|dopo|dorado|el(?:38|39|48|49|50|55|58|68)|el[3456]\d{2}dual|erk0|esl8|ex300|ez40|ez60|ez70|ezos|ezze|elai|emul|eric|ezwa|fake|fly\-|fly_|g\-mo|g1 u|g560|gf\-5|grun|gene|go.w|good|grad|hcit|hd\-m|hd\-p|hd\-t|hei\-|hp i|hpip|hs\-c|htc |htc\-|htca|htcg)' + regex_flag: 'i' + device_replacement: 'Generic Feature Phone' + brand_replacement: 'Generic' + model_replacement: 'Feature Phone' + - regex: '^(htcp|htcs|htct|htc_|haie|hita|huaw|hutc|i\-20|i\-go|i\-ma|i\-mobile|i230|iac|iac\-|iac/|ig01|im1k|inno|iris|jata|kddi|kgt|kgt/|kpt |kwc\-|klon|lexi|lg g|lg\-a|lg\-b|lg\-c|lg\-d|lg\-f|lg\-g|lg\-k|lg\-l|lg\-m|lg\-o|lg\-p|lg\-s|lg\-t|lg\-u|lg\-w|lg/k|lg/l|lg/u|lg50|lg54|lge\-|lge/|leno|m1\-w|m3ga|m50/|maui|mc01|mc21|mcca|medi|meri|mio8|mioa|mo01|mo02|mode|modo|mot |mot\-|mt50|mtp1|mtv |mate|maxo|merc|mits|mobi|motv|mozz|n100|n101|n102|n202|n203|n300|n302|n500|n502|n505|n700|n701|n710|nec\-|nem\-|newg|neon)' + regex_flag: 'i' + device_replacement: 'Generic Feature Phone' + brand_replacement: 'Generic' + model_replacement: 'Feature Phone' + - regex: '^(netf|noki|nzph|o2 x|o2\-x|opwv|owg1|opti|oran|ot\-s|p800|pand|pg\-1|pg\-2|pg\-3|pg\-6|pg\-8|pg\-c|pg13|phil|pn\-2|pt\-g|palm|pana|pire|pock|pose|psio|qa\-a|qc\-2|qc\-3|qc\-5|qc\-7|qc07|qc12|qc21|qc32|qc60|qci\-|qwap|qtek|r380|r600|raks|rim9|rove|s55/|sage|sams|sc01|sch\-|scp\-|sdk/|se47|sec\-|sec0|sec1|semc|sgh\-|shar|sie\-|sk\-0|sl45|slid|smb3|smt5|sp01|sph\-|spv |spv\-|sy01|samm|sany|sava|scoo|send|siem|smar|smit|soft|sony|t\-mo|t218|t250|t600|t610|t618|tcl\-|tdg\-|telm|tim\-|ts70|tsm\-|tsm3|tsm5|tx\-9|tagt)' + regex_flag: 'i' + device_replacement: 'Generic Feature Phone' + brand_replacement: 'Generic' + model_replacement: 'Feature Phone' + - regex: '^(talk|teli|topl|tosh|up.b|upg1|utst|v400|v750|veri|vk\-v|vk40|vk50|vk52|vk53|vm40|vx98|virg|vertu|vite|voda|vulc|w3c |w3c\-|wapj|wapp|wapu|wapm|wig 
|wapi|wapr|wapv|wapy|wapa|waps|wapt|winc|winw|wonu|x700|xda2|xdag|yas\-|your|zte\-|zeto|aste|audi|avan|blaz|brew|brvw|bumb|ccwa|cell|cldc|cmd\-|dang|eml2|fetc|hipt|http|ibro|idea|ikom|ipaq|jbro|jemu|jigs|keji|kyoc|kyok|libw|m\-cr|midp|mmef|moto|mwbp|mywa|newt|nok6|o2im|pant|pdxg|play|pluc|port|prox|rozo|sama|seri|smal|symb|treo|upsi|vx52|vx53|vx60|vx61|vx70|vx80|vx81|vx83|vx85|wap\-|webc|whit|wmlb|xda\-|xda_)'
+    regex_flag: 'i'
+    device_replacement: 'Generic Feature Phone'
+    brand_replacement: 'Generic'
+    model_replacement: 'Feature Phone'
+  - regex: '^(Ice)$'
+    device_replacement: 'Generic Feature Phone'
+    brand_replacement: 'Generic'
+    model_replacement: 'Feature Phone'
+  - regex: '(wap[\-\ ]browser|maui|netfront|obigo|teleca|up\.browser|midp|Opera Mini)'
+    regex_flag: 'i'
+    device_replacement: 'Generic Feature Phone'
+    brand_replacement: 'Generic'
+    model_replacement: 'Feature Phone'
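
Note on the device_parsers list above: entries are tried top to bottom and the first matching regex wins, which is why the hardware-ID pattern precedes the plain iPad/iPod/iPhone patterns and why the `@note` comments call out ordering. A small self-contained sketch of what the two Apple patterns capture, using java.util.regex directly (both user-agent strings here are made-up examples, not taken from this change):

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class AppleDevicePatternDemo {
        public static void main(String[] args) {
            // Hardware-ID pattern, listed first in regexes.yaml
            Pattern hardwareId = Pattern.compile("((?:iPhone|iPad|iPod)\\d+,\\d+)");
            // Generic iPhone pattern, listed after it
            Pattern generic = Pattern.compile("(iPhone)(?:;| Simulator;)");

            Matcher m1 = hardwareId.matcher("MyApp/1.0 CFNetwork/609 Darwin/13.0.0 iPhone5,1");
            System.out.println(m1.find() ? m1.group(1) : "no match"); // prints: iPhone5,1

            Matcher m2 = generic.matcher("Mozilla/5.0 (iPhone; CPU iPhone OS 6_0 like Mac OS X)");
            System.out.println(m2.find() ? m2.group(1) : "no match"); // prints: iPhone
        }
    }
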
diff --git a/plugins/ingest-useragent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorFactoryTests.java b/plugins/ingest-useragent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorFactoryTests.java
new file mode 100644
index 00000000000..52a10840387
--- /dev/null
+++ b/plugins/ingest-useragent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorFactoryTests.java
@@ -0,0 +1,174 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.useragent;
+
+import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.BeforeClass;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+
+public class UserAgentProcessorFactoryTests extends ESTestCase {
+
+    private static Map<String, UserAgentParser> userAgentParsers;
+
+    private static String regexWithoutDevicesFilename = "regexes_without_devices.yaml";
+    private static Path userAgentConfigDir;
+
+    @BeforeClass
+    public static void createUserAgentParsers() throws IOException {
+        Path configDir = createTempDir();
+        userAgentConfigDir = configDir.resolve("ingest-useragent");
+        Files.createDirectories(userAgentConfigDir);
+
+        // Copy file, leaving out the device parsers at the end
+        try (BufferedReader reader = new BufferedReader(
+                new InputStreamReader(UserAgentProcessor.class.getResourceAsStream("/regexes.yaml"), StandardCharsets.UTF_8));
+                BufferedWriter writer = Files.newBufferedWriter(userAgentConfigDir.resolve(regexWithoutDevicesFilename));) {
+            String line;
+            while ((line = reader.readLine()) != null) {
+                if (line.startsWith("device_parsers:")) {
+                    break;
+                }
+
+                writer.write(line);
+                writer.newLine();
+            }
+        }
+
+        userAgentParsers = IngestUserAgentPlugin.createUserAgentParsers(userAgentConfigDir, new UserAgentCache(1000));
+    }
+
+    public void testBuildDefaults() throws Exception {
+        UserAgentProcessor.Factory factory = new UserAgentProcessor.Factory(userAgentParsers);
+
+        Map<String, Object> config = new HashMap<>();
+        config.put("field", "_field");
+
+        String processorTag = randomAsciiOfLength(10);
+
+        UserAgentProcessor processor = factory.create(processorTag, config);
+        assertThat(processor.getTag(), equalTo(processorTag));
+        assertThat(processor.getField(), equalTo("_field"));
+        assertThat(processor.getTargetField(), equalTo("useragent"));
+        assertThat(processor.getUaParser().getUaPatterns().size(), greaterThan(0));
+        assertThat(processor.getUaParser().getOsPatterns().size(), greaterThan(0));
+        assertThat(processor.getUaParser().getDevicePatterns().size(), greaterThan(0));
+        assertThat(processor.getProperties(), equalTo(EnumSet.allOf(UserAgentProcessor.Property.class)));
+    }
+
+    public void testBuildTargetField() throws Exception {
+        UserAgentProcessor.Factory factory = new UserAgentProcessor.Factory(userAgentParsers);
+
+        Map<String, Object> config = new HashMap<>();
+        config.put("field", "_field");
+        config.put("target_field", "_target_field");
+
+        UserAgentProcessor processor = factory.create(null, config);
+        assertThat(processor.getField(), equalTo("_field"));
+        assertThat(processor.getTargetField(), equalTo("_target_field"));
+    }
+
+    public void testBuildRegexFile() throws Exception {
+        UserAgentProcessor.Factory factory = new UserAgentProcessor.Factory(userAgentParsers);
+
+        Map<String, Object> config = new HashMap<>();
+        config.put("field", "_field");
+        config.put("regex_file", regexWithoutDevicesFilename);
+
+        UserAgentProcessor processor = factory.create(null, config);
+        assertThat(processor.getField(), equalTo("_field"));
+        assertThat(processor.getUaParser().getUaPatterns().size(), greaterThan(0));
+        assertThat(processor.getUaParser().getOsPatterns().size(), greaterThan(0));
+        assertThat(processor.getUaParser().getDevicePatterns().size(), equalTo(0));
+    }
+
+    public void testBuildNonExistingRegexFile() throws Exception {
+        UserAgentProcessor.Factory factory = new UserAgentProcessor.Factory(userAgentParsers);
+
+        Map<String, Object> config = new HashMap<>();
+        config.put("field", "_field");
+        config.put("regex_file", "does-not-exist.yaml");
+
+        ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, config));
+        assertThat(e.getMessage(), equalTo("[regex_file] regex file [does-not-exist.yaml] doesn't exist (has to exist at node startup)"));
+    }
+
+    public void testBuildFields() throws Exception {
+        UserAgentProcessor.Factory factory = new UserAgentProcessor.Factory(userAgentParsers);
+
+        Set<UserAgentProcessor.Property> properties = EnumSet.noneOf(UserAgentProcessor.Property.class);
+        List<String> fieldNames = new ArrayList<>();
+        int numFields = scaledRandomIntBetween(1, UserAgentProcessor.Property.values().length);
+        for (int i = 0; i < numFields; i++) {
+            UserAgentProcessor.Property property = UserAgentProcessor.Property.values()[i];
+            properties.add(property);
+            fieldNames.add(property.name().toLowerCase(Locale.ROOT));
+        }
+
+        Map<String, Object> config = new HashMap<>();
+        config.put("field", "_field");
+        config.put("properties", fieldNames);
+
+        UserAgentProcessor processor = factory.create(null, config);
+        assertThat(processor.getField(), equalTo("_field"));
+        assertThat(processor.getProperties(), equalTo(properties));
+    }
+
+    public void testInvalidProperty() throws Exception {
+        UserAgentProcessor.Factory factory = new UserAgentProcessor.Factory(userAgentParsers);
+
+        Map<String, Object> config = new HashMap<>();
+        config.put("field", "_field");
+        config.put("properties", Collections.singletonList("invalid"));
+
+        ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, config));
+        assertThat(e.getMessage(), equalTo("[properties] illegal property value [invalid]. valid values are [NAME, MAJOR, MINOR, " +
+            "PATCH, OS, OS_NAME, OS_MAJOR, OS_MINOR, DEVICE, BUILD]"));
+    }
+
+    public void testInvalidPropertiesType() throws Exception {
+        UserAgentProcessor.Factory factory = new UserAgentProcessor.Factory(userAgentParsers);
+
+        Map<String, Object> config = new HashMap<>();
+        config.put("field", "_field");
+        config.put("properties", "invalid");
+
+        ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, config));
+        assertThat(e.getMessage(), equalTo("[properties] property isn't a list, but of type [java.lang.String]"));
+    }
+}
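
For context, the factory contract these tests pin down: a processor is built from a plain config map, where `field` is required and `target_field`, `regex_file`, and `properties` are optional. A minimal usage sketch along the lines of the tests above (the tag and field names are placeholders, and the parsers map is the one built in createUserAgentParsers()):

    import java.util.HashMap;
    import java.util.Map;

    public class FactoryUsageSketch {
        static UserAgentProcessor buildProcessor(Map<String, UserAgentParser> userAgentParsers) throws Exception {
            Map<String, Object> config = new HashMap<>();
            config.put("field", "user_agent");       // required: field holding the raw UA string
            config.put("target_field", "ua_info");   // optional, defaults to "useragent"
            UserAgentProcessor.Factory factory = new UserAgentProcessor.Factory(userAgentParsers);
            // Invalid configuration surfaces as ElasticsearchParseException,
            // as testInvalidProperty and testBuildNonExistingRegexFile show.
            return factory.create("my-tag", config);
        }
    }
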
diff --git a/plugins/ingest-useragent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java b/plugins/ingest-useragent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java
new file mode 100644
index 00000000000..d9b5eed059a
--- /dev/null
+++ b/plugins/ingest-useragent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java
@@ -0,0 +1,161 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.useragent;
+
+import org.elasticsearch.ingest.RandomDocumentPicks;
+import org.elasticsearch.ingest.IngestDocument;
+import org.elasticsearch.ingest.useragent.UserAgentProcessor;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.BeforeClass;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.hasKey;
+import static org.hamcrest.Matchers.is;
+
+public class UserAgentProcessorTests extends ESTestCase {
+
+    private static UserAgentProcessor processor;
+
+    @BeforeClass
+    public static void setupProcessor() throws IOException {
+        InputStream regexStream = UserAgentProcessor.class.getResourceAsStream("/regexes.yaml");
+        assertNotNull(regexStream);
+
+        UserAgentParser parser = new UserAgentParser(randomAsciiOfLength(10), regexStream, new UserAgentCache(1000));
+
+        processor = new UserAgentProcessor(randomAsciiOfLength(10), "source_field", "target_field", parser,
+            EnumSet.allOf(UserAgentProcessor.Property.class));
+    }
+
+    @SuppressWarnings("unchecked")
+    public void testCommonBrowser() throws Exception {
+        Map<String, Object> document = new HashMap<>();
+        document.put("source_field",
+            "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36");
+        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
+
+        processor.execute(ingestDocument);
+        Map<String, Object> data = ingestDocument.getSourceAndMetadata();
+
+        assertThat(data, hasKey("target_field"));
+        Map<String, Object> target = (Map<String, Object>) data.get("target_field");
+
+        assertThat(target.get("name"), is("Chrome"));
+        assertThat(target.get("major"), is("33"));
+        assertThat(target.get("minor"), is("0"));
+        assertThat(target.get("patch"), is("1750"));
+        assertNull(target.get("build"));
+
+        assertThat(target.get("os"), is("Mac OS X 10.9.2"));
+        assertThat(target.get("os_name"), is("Mac OS X"));
+        assertThat(target.get("os_major"), is("10"));
+        assertThat(target.get("os_minor"), is("9"));
+
+        assertThat(target.get("device"), is("Other"));
+    }
+
+    @SuppressWarnings("unchecked")
+    public void testUncommonDevice() throws Exception {
+        Map<String, Object> document = new HashMap<>();
+        document.put("source_field",
+            "Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/525.10+ " +
+            "(KHTML, like Gecko) Version/3.0.4 Mobile Safari/523.12.2");
+        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
+
+        processor.execute(ingestDocument);
+        Map<String, Object> data = ingestDocument.getSourceAndMetadata();
+
+        assertThat(data, hasKey("target_field"));
+        Map<String, Object> target = (Map<String, Object>) data.get("target_field");
+
+        assertThat(target.get("name"), is("Android"));
+        assertThat(target.get("major"), is("3"));
+        assertThat(target.get("minor"), is("0"));
+        assertNull(target.get("patch"));
+        assertNull(target.get("build"));
+
+        assertThat(target.get("os"), is("Android 3.0"));
+        assertThat(target.get("os_name"), is("Android"));
+        assertThat(target.get("os_major"), is("3"));
+        assertThat(target.get("os_minor"), is("0"));
+
+        assertThat(target.get("device"), is("Motorola Xoom"));
+    }
+
+    @SuppressWarnings("unchecked")
+    public void testSpider() throws Exception {
+        Map<String, Object> document = new HashMap<>();
+        document.put("source_field",
+            "Mozilla/5.0 (compatible; EasouSpider; +http://www.easou.com/search/spider.html)");
+        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
+
+        processor.execute(ingestDocument);
+        Map<String, Object> data = ingestDocument.getSourceAndMetadata();
+
+        assertThat(data, hasKey("target_field"));
+        Map<String, Object> target = (Map<String, Object>) data.get("target_field");
+
+        assertThat(target.get("name"), is("EasouSpider"));
+        assertNull(target.get("major"));
+        assertNull(target.get("minor"));
+        assertNull(target.get("patch"));
+        assertNull(target.get("build"));
+
+        assertThat(target.get("os"), is("Other"));
+        assertThat(target.get("os_name"), is("Other"));
+        assertNull(target.get("os_major"));
+        assertNull(target.get("os_minor"));
+
+        assertThat(target.get("device"), is("Spider"));
+    }
+
+    @SuppressWarnings("unchecked")
+    public void testUnknown() throws Exception {
+        Map<String, Object> document = new HashMap<>();
+        document.put("source_field",
+            "Something I made up v42.0.1");
+        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
+
+        processor.execute(ingestDocument);
+        Map<String, Object> data = ingestDocument.getSourceAndMetadata();
+
+        assertThat(data, hasKey("target_field"));
+        Map<String, Object> target = (Map<String, Object>) data.get("target_field");
+
+        assertThat(target.get("name"), is("Other"));
+        assertNull(target.get("major"));
+        assertNull(target.get("minor"));
+        assertNull(target.get("patch"));
+        assertNull(target.get("build"));
+
+        assertThat(target.get("os"), is("Other"));
+        assertThat(target.get("os_name"), is("Other"));
+        assertNull(target.get("os_major"));
+        assertNull(target.get("os_minor"));
+
+        assertThat(target.get("device"), is("Other"));
+    }
+}
+
diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/JsonEscapingMustacheFactory.java b/plugins/ingest-useragent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentRestIT.java
similarity index 54%
rename from modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/JsonEscapingMustacheFactory.java
rename to plugins/ingest-useragent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentRestIT.java
index 38d48b98f4e..010f85f671a 100644
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/JsonEscapingMustacheFactory.java
+++ b/plugins/ingest-useragent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentRestIT.java
@@ -16,26 +16,25 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.script.mustache;
-import com.fasterxml.jackson.core.io.JsonStringEncoder;
-import com.github.mustachejava.DefaultMustacheFactory;
-import com.github.mustachejava.MustacheException;
+package org.elasticsearch.ingest.useragent;
+
+import com.carrotsearch.randomizedtesting.annotations.Name;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+import org.elasticsearch.test.rest.ESRestTestCase;
+import org.elasticsearch.test.rest.RestTestCandidate;
+import org.elasticsearch.test.rest.parser.RestTestParseException;
 import java.io.IOException;
-import java.io.Writer;
-/**
- * A MustacheFactory that does simple JSON escaping.
- */
-final class JsonEscapingMustacheFactory extends DefaultMustacheFactory {
+public class UserAgentRestIT extends ESRestTestCase {
 
-    @Override
-    public void encode(String value, Writer writer) {
-        try {
-            writer.write(JsonStringEncoder.getInstance().quoteAsString(value));
-        } catch (IOException e) {
-            throw new MustacheException("Failed to encode value: " + value);
-        }
+    public UserAgentRestIT(@Name("yaml") RestTestCandidate testCandidate) {
+        super(testCandidate);
+    }
+
+    @ParametersFactory
+    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
+        return ESRestTestCase.createParameters(0, 1);
     }
 }
diff --git a/plugins/ingest-useragent/src/test/resources/rest-api-spec/test/ingest-useragent/10_basic.yaml b/plugins/ingest-useragent/src/test/resources/rest-api-spec/test/ingest-useragent/10_basic.yaml
new file mode 100644
index 00000000000..d37ad674cb9
--- /dev/null
+++ b/plugins/ingest-useragent/src/test/resources/rest-api-spec/test/ingest-useragent/10_basic.yaml
@@ -0,0 +1,11 @@
+"ingest-useragent plugin installed":
+  - do:
+      cluster.state: {}
+
+  - set: {master_node: master}
+
+  - do:
+      nodes.info: {}
+
+  - match: { nodes.$master.plugins.0.name: ingest-useragent }
+  - match: { nodes.$master.ingest.processors.0.type: useragent }
diff --git a/plugins/ingest-useragent/src/test/resources/rest-api-spec/test/ingest-useragent/20_useragent_processor.yaml b/plugins/ingest-useragent/src/test/resources/rest-api-spec/test/ingest-useragent/20_useragent_processor.yaml
new file mode 100644
index 00000000000..13451a075d7
--- /dev/null
+++ b/plugins/ingest-useragent/src/test/resources/rest-api-spec/test/ingest-useragent/20_useragent_processor.yaml
@@ -0,0 +1,86 @@
+---
+"Test user agent processor with defaults":
+  - do:
+      ingest.put_pipeline:
+        id: "my_pipeline"
+        body:  >
+          {
+            "description": "_description",
+            "processors": [
+              {
+                "useragent" : {
+                  "field" : "field1"
+                }
+              }
+            ]
+          }
+  - match: { acknowledged: true }
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 1
+        pipeline: "my_pipeline"
+        body: {field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36"}
+
+  - do:
+      get:
+        index: test
+        type: test
+        id: 1
+  - match: { _source.field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36" }
+  - match: { _source.useragent.name: "Chrome" }
+  - match: { _source.useragent.os: "Mac OS X 10.9.2" }
+  - match: { _source.useragent.os_name: "Mac OS X" }
+  - match: { _source.useragent.os_major: "10" }
+  - match: { _source.useragent.os_minor: "9" }
+  - match: { _source.useragent.major: "33" }
+  - match: { _source.useragent.minor: "0" }
+  - match: { _source.useragent.patch: "1750" }
+  - match: { _source.useragent.device: "Other" }
+
+---
+"Test user agent processor with parameters":
+  - do:
+      ingest.put_pipeline:
+        id: "my_pipeline"
+        body:  >
+          {
+            "description": "_description",
+            "processors": [
+              {
+                "useragent" : {
+                  "field" : "field1",
+                  "target_field": "field2",
+                  "properties": ["os"]
+                }
+              }
+            ]
+          }
+  - match: { acknowledged: true }
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 1
+        pipeline: "my_pipeline"
+        body: {field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36"}
+
+  - do:
+      get:
+        index: test
+        type: test
+        id: 1
+  - match: { _source.field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36" }
+  - match: { _source.field2.os: "Mac OS X 10.9.2" }
+  - is_false: _source.useragent
+  - is_false: _source.field2.name
+  - is_false: _source.field2.os_name
+  - is_false: _source.field2.os_major
+  - is_false: _source.field2.os_minor
+  - is_false: _source.field2.major
+  - is_false: _source.field2.minor
+  - is_false: _source.field2.patch
+  - is_false: _source.field2.device
diff --git a/plugins/ingest-useragent/src/test/resources/rest-api-spec/test/ingest-useragent/30_custom_regex.yaml b/plugins/ingest-useragent/src/test/resources/rest-api-spec/test/ingest-useragent/30_custom_regex.yaml
new file mode 100644
index 00000000000..3472cd03c3f
--- /dev/null
+++ b/plugins/ingest-useragent/src/test/resources/rest-api-spec/test/ingest-useragent/30_custom_regex.yaml
@@ -0,0 +1,42 @@
+---
+"Test user agent processor with custom regex file":
+  - do:
+      ingest.put_pipeline:
+        id: "my_pipeline"
+        body:  >
+          {
+            "description": "_description",
+            "processors": [
+              {
+                "useragent" : {
+                  "field": "field1",
+                  "regex_file": "test-regexes.yaml"
+                }
+              }
+            ]
+          }
+  - match: { acknowledged: true }
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 1
+        pipeline: "my_pipeline"
+        body: {field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36"}
+
+  - do:
+      get:
+        index: test
+        type: test
+        id: 1
+  - match: { _source.field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36" }
+  - match: { _source.useragent.name: "Test" }
+  - match: { _source.useragent.os: "Other" }
+  - match: { _source.useragent.os_name: "Other" }
+  - match: { _source.useragent.device: "Other" }
+  - is_false: _source.useragent.os_major
+  - is_false: _source.useragent.os_minor
+  - is_false: _source.useragent.major
+  - is_false: _source.useragent.minor
+  - is_false: _source.useragent.patch
diff --git a/plugins/ingest-useragent/test/test-regexes.yaml b/plugins/ingest-useragent/test/test-regexes.yaml
new file mode 100644
index 00000000000..e41dec700c0
--- /dev/null
+++ b/plugins/ingest-useragent/test/test-regexes.yaml
@@ -0,0 +1,3 @@
+user_agent_parsers:
+  - regex: '.*'
+    family_replacement: 'Test'
\ No newline at end of file
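
The catch-all test-regexes.yaml above maps every user agent to the family "Test", which is exactly what 30_custom_regex.yaml asserts. The same file can be fed straight to the parser outside a REST test; a minimal sketch using only the constructor and getter visible in the unit tests above (the path is assumed, and this would need to live in the org.elasticsearch.ingest.useragent package to see the package-private classes):

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    Path regexFile = Paths.get("plugins/ingest-useragent/test/test-regexes.yaml"); // assumed location
    try (InputStream stream = Files.newInputStream(regexFile)) {
        UserAgentParser parser = new UserAgentParser("custom", stream, new UserAgentCache(10));
        // Only the single '.*' user_agent_parser is loaded; the os and device sections are absent
        assert parser.getUaPatterns().size() == 1;
    }
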
diff --git a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/ExampleCatAction.java b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/ExampleCatAction.java
index d5e0a62ecb5..b27c3fad2b8 100644
--- a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/ExampleCatAction.java
+++ b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/ExampleCatAction.java
@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.plugin.example;
 
-import org.elasticsearch.client.Client;
+import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.Table;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
@@ -38,26 +38,26 @@ public class ExampleCatAction extends AbstractCatAction {
     private final ExamplePluginConfiguration config;
 
     @Inject
-    public ExampleCatAction(Settings settings, RestController controller,
-                            Client client, ExamplePluginConfiguration config) {
-        super(settings, controller, client);
+    public ExampleCatAction(Settings settings, RestController controller, ExamplePluginConfiguration config) {
+        super(settings);
         this.config = config;
         controller.registerHandler(GET, "/_cat/configured_example", this);
     }
 
     @Override
-    protected void doRequest(final RestRequest request, final RestChannel channel, final Client client) {
+    protected void doRequest(final RestRequest request, final RestChannel channel, final NodeClient client) {
         Table table = getTableWithHeader(request);
         table.startRow();
         table.addCell(config.getTestConfig());
         table.endRow();
         try {
             channel.sendResponse(RestTable.buildResponse(table, channel));
-        } catch (Throwable e) {
+        } catch (Exception e) {
             try {
                 channel.sendResponse(new BytesRestResponse(channel, e));
-            } catch (Throwable e1) {
-                logger.error("failed to send failure response", e1);
+            } catch (Exception inner) {
+                inner.addSuppressed(e);
+                logger.error("failed to send failure response", inner);
             }
         }
     }
diff --git a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java
index c2f1214e72b..634a4ca6dfa 100644
--- a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java
+++ b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java
@@ -35,9 +35,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
 
 import static org.hamcrest.Matchers.equalTo;
 
-/**
- *
- */
 public class JavaScriptScriptMultiThreadedTests extends ESTestCase {
     public void testExecutableNoRuntimeParams() throws Exception {
         final JavaScriptScriptEngineService se = new JavaScriptScriptEngineService(Settings.Builder.EMPTY_SETTINGS);
@@ -64,9 +61,9 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase {
                             long result = ((Number) script.run()).longValue();
                             assertThat(result, equalTo(addition));
                         }
-                    } catch (Throwable t) {
+                    } catch (Exception e) {
                         failed.set(true);
-                        logger.error("failed", t);
+                        logger.error("failed", e);
                     } finally {
                         latch.countDown();
                     }
@@ -106,9 +103,9 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase {
                             long result = ((Number) script.run()).longValue();
                             assertThat(result, equalTo(addition));
                         }
-                    } catch (Throwable t) {
+                    } catch (Exception e) {
                         failed.set(true);
-                        logger.error("failed", t);
+                        logger.error("failed", e);
                     } finally {
                         latch.countDown();
                     }
@@ -147,9 +144,9 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase {
                             long result = ((Number) se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testExecutableNoRuntimeParams", "js", compiled), runtimeVars).run()).longValue();
                             assertThat(result, equalTo(addition));
                         }
-                    } catch (Throwable t) {
+                    } catch (Exception e) {
                         failed.set(true);
-                        logger.error("failed", t);
+                        logger.error("failed", e);
                     } finally {
                         latch.countDown();
                     }
diff --git a/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java b/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java
index b642b7b0a73..5a16c06d4dc 100644
--- a/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java
+++ b/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java
@@ -261,7 +261,7 @@ public class PythonScriptEngineService extends AbstractComponent implements Scri
     }
 
     /** Evaluates with reduced privileges */
-    private final PyObject evalRestricted(final PyCode code) {
+    private PyObject evalRestricted(final PyCode code) {
        // eval the script with reduced privileges
        return AccessController.doPrivileged(new PrivilegedAction<PyObject>() {
            @Override
diff --git
a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptMultiThreadedTests.java b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptMultiThreadedTests.java index abf9f661a6d..0a887bc9a7e 100644 --- a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptMultiThreadedTests.java +++ b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptMultiThreadedTests.java @@ -66,9 +66,9 @@ public class PythonScriptMultiThreadedTests extends ESTestCase { long result = ((Number) script.run()).longValue(); assertThat(result, equalTo(addition)); } - } catch (Throwable t) { + } catch (Exception e) { failed.set(true); - logger.error("failed", t); + logger.error("failed", e); } finally { latch.countDown(); } @@ -109,9 +109,9 @@ public class PythonScriptMultiThreadedTests extends ESTestCase { // long result = ((Number) script.run(runtimeVars)).longValue(); // assertThat(result, equalTo(addition)); // } -// } catch (Throwable t) { +// } catch (Exception e) { // failed.set(true); -// logger.error("failed", t); +// logger.error("failed", e); // } finally { // latch.countDown(); // } @@ -151,9 +151,9 @@ public class PythonScriptMultiThreadedTests extends ESTestCase { long result = ((Number) se.executable(compiledScript, runtimeVars).run()).longValue(); assertThat(result, equalTo(addition)); } - } catch (Throwable t) { + } catch (Exception e) { failed.set(true); - logger.error("failed", t); + logger.error("failed", e); } finally { latch.countDown(); } diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java index 79174e54c62..06e51686823 100644 --- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java +++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java @@ -482,7 +482,7 @@ public class AttachmentMapper extends FieldMapper { String parsedContent; try { parsedContent = TikaImpl.parse(content, metadata, indexedChars); - } catch (Throwable e) { + } catch (Exception e) { // #18: we could ignore errors when Tika does not parse data if (!ignoreErrors) { logger.trace("exception caught", e); @@ -508,8 +508,8 @@ public class AttachmentMapper extends FieldMapper { } context = context.createExternalValueContext(language); languageMapper.parse(context); - } catch(Throwable t) { - logger.debug("Cannot detect language: [{}]", t.getMessage()); + } catch(Exception e) { + logger.debug("Cannot detect language: [{}]", e.getMessage()); } } diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/MapperAttachmentsPlugin.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/MapperAttachmentsPlugin.java index d523e0f0184..6cf957f05c0 100644 --- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/MapperAttachmentsPlugin.java +++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/MapperAttachmentsPlugin.java @@ -19,18 +19,20 @@ package org.elasticsearch.mapper.attachments; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; + import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; import 
org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.SettingsModule; -import org.elasticsearch.indices.IndicesModule; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; -import java.util.Arrays; -import java.util.List; - -public class MapperAttachmentsPlugin extends Plugin { +public class MapperAttachmentsPlugin extends Plugin implements MapperPlugin { private static ESLogger logger = ESLoggerFactory.getLogger("mapper.attachment"); private static DeprecationLogger deprecationLogger = new DeprecationLogger(logger); @@ -44,7 +46,8 @@ public class MapperAttachmentsPlugin extends Plugin { AttachmentMapper.INDEX_ATTACHMENT_INDEXED_CHARS_SETTING); } - public void onModule(IndicesModule indicesModule) { - indicesModule.registerMapper("attachment", new AttachmentMapper.TypeParser()); + @Override + public Map getMappers() { + return Collections.singletonMap("attachment", new AttachmentMapper.TypeParser()); } } diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/AttachmentUnitTestCase.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/AttachmentUnitTestCase.java index e0d5d7a2ec6..29dfff66d96 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/AttachmentUnitTestCase.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/AttachmentUnitTestCase.java @@ -19,12 +19,18 @@ package org.elasticsearch.mapper.attachments; +import java.util.Arrays; +import java.util.Collections; +import java.util.Map; + import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.indices.IndicesModule; +import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -33,9 +39,10 @@ public abstract class AttachmentUnitTestCase extends ESTestCase { protected Settings testSettings; protected static IndicesModule getIndicesModuleWithRegisteredAttachmentMapper() { - IndicesModule indicesModule = new IndicesModule(new NamedWriteableRegistry()); - indicesModule.registerMapper(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser()); - return indicesModule; + return newTestIndicesModule( + Collections.singletonMap(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser()), + Collections.emptyMap() + ); } @Before diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java index 8c86800e52b..6b80baa7c28 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java @@ -116,7 +116,7 @@ public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase { .endObject() .endObject(); - byte[] mapping = mappingBuilder.bytes().toBytes(); + byte[] mapping = BytesReference.toBytes(mappingBuilder.bytes()); MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, 
getIndicesModuleWithRegisteredAttachmentMapper());
         DocumentMapper docMapper = mapperService.parse("mail", new CompressedXContent(mapping), true);
         // this should not throw an exception
diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaDocTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaDocTests.java
index fbbdeb83a7d..b32a6ab79a0 100644
--- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaDocTests.java
+++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaDocTests.java
@@ -58,7 +58,7 @@ public class TikaDocTests extends ESTestCase {
             assertNotNull(parsedContent);
             assertFalse(parsedContent.isEmpty());
             logger.debug("extracted content: {}", parsedContent);
-        } catch (Throwable e) {
+        } catch (Exception e) {
             throw new RuntimeException("parsing of filename: " + fileName.getFileName() + " failed", e);
         }
     }
diff --git a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/plugin/mapper/MapperMurmur3Plugin.java b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/plugin/mapper/MapperMurmur3Plugin.java
index 384fc4272f6..987a4cf9bc0 100644
--- a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/plugin/mapper/MapperMurmur3Plugin.java
+++ b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/plugin/mapper/MapperMurmur3Plugin.java
@@ -19,14 +19,18 @@
 
 package org.elasticsearch.plugin.mapper;
 
+import java.util.Collections;
+import java.util.Map;
+
+import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.murmur3.Murmur3FieldMapper;
-import org.elasticsearch.indices.IndicesModule;
+import org.elasticsearch.plugins.MapperPlugin;
 import org.elasticsearch.plugins.Plugin;
 
-public class MapperMurmur3Plugin extends Plugin {
+public class MapperMurmur3Plugin extends Plugin implements MapperPlugin {
 
-    public void onModule(IndicesModule indicesModule) {
-        indicesModule.registerMapper(Murmur3FieldMapper.CONTENT_TYPE, new Murmur3FieldMapper.TypeParser());
+    @Override
+    public Map<String, Mapper.TypeParser> getMappers() {
+        return Collections.singletonMap(Murmur3FieldMapper.CONTENT_TYPE, new Murmur3FieldMapper.TypeParser());
     }
-
 }
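
The mapper-murmur3 change above, mapper-attachments earlier, and mapper-size below all follow the same migration off onModule(IndicesModule): mappers are now contributed declaratively through the MapperPlugin interface. A minimal sketch of the pattern for a hypothetical plugin (MyFieldMapper is a placeholder, not part of this change):

    import java.util.Collections;
    import java.util.Map;

    import org.elasticsearch.index.mapper.Mapper;
    import org.elasticsearch.plugins.MapperPlugin;
    import org.elasticsearch.plugins.Plugin;

    public class MyMapperPlugin extends Plugin implements MapperPlugin {
        @Override
        public Map<String, Mapper.TypeParser> getMappers() {
            // The map keys become the mapping type names usable in index mappings
            return Collections.singletonMap("my_type", new MyFieldMapper.TypeParser());
        }
    }
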
diff --git a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java
index 9c07c7b3eb3..c9c8972c62d 100644
--- a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java
+++ b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java
@@ -42,15 +42,15 @@ import java.util.Map;
 import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
 
 public class SizeFieldMapper extends MetadataFieldMapper {
-
     public static final String NAME = "_size";
-
     public static final String CONTENT_TYPE = "_size";
 
     public static class Defaults {
         public static final EnabledAttributeMapper ENABLED_STATE = EnabledAttributeMapper.UNSET_DISABLED;
-        public static final MappedFieldType SIZE_FIELD_TYPE = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
-        public static final MappedFieldType LEGACY_SIZE_FIELD_TYPE = LegacyIntegerFieldMapper.Defaults.FIELD_TYPE.clone();
+        public static final MappedFieldType SIZE_FIELD_TYPE =
+            new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
+        public static final MappedFieldType LEGACY_SIZE_FIELD_TYPE =
+            LegacyIntegerFieldMapper.Defaults.FIELD_TYPE.clone();
 
         static {
             SIZE_FIELD_TYPE.setStored(true);
@@ -68,14 +68,31 @@ public class SizeFieldMapper extends MetadataFieldMapper {
         }
     }
 
+    private static MappedFieldType defaultFieldType(Version indexCreated) {
+        MappedFieldType defaultFieldType;
+        if (indexCreated.before(Version.V_5_0_0_alpha2)) {
+            defaultFieldType = Defaults.LEGACY_SIZE_FIELD_TYPE.clone();
+            // doc_values are disabled for bwc with indices created before V_5_0_0_alpha4
+            defaultFieldType.setHasDocValues(false);
+        } else {
+            defaultFieldType = Defaults.SIZE_FIELD_TYPE.clone();
+            if (indexCreated.onOrBefore(Version.V_5_0_0_alpha4)) {
+                // doc_values are disabled for bwc with indices created before V_5_0_0_alpha4
+                defaultFieldType.setHasDocValues(false);
+            } else {
+                defaultFieldType.setHasDocValues(true);
+            }
+        }
+        return defaultFieldType;
+    }
+
     public static class Builder extends MetadataFieldMapper.Builder<Builder, SizeFieldMapper> {
 
         protected EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED;
 
         private Builder(MappedFieldType existing, Version indexCreated) {
-            super(NAME, existing == null
-                ? indexCreated.before(Version.V_5_0_0_alpha2) ? Defaults.LEGACY_SIZE_FIELD_TYPE : Defaults.SIZE_FIELD_TYPE
-                : existing, Defaults.LEGACY_SIZE_FIELD_TYPE);
+            super(NAME, existing == null ? defaultFieldType(indexCreated) : existing.clone(),
+                defaultFieldType(indexCreated));
             builder = this;
         }
 
@@ -87,21 +104,27 @@ public class SizeFieldMapper extends MetadataFieldMapper {
         @Override
         public SizeFieldMapper build(BuilderContext context) {
             setupFieldType(context);
-            fieldType.setHasDocValues(false);
+            if (context.indexCreatedVersion().onOrBefore(Version.V_5_0_0_alpha4)) {
+                // Make sure that the doc_values are disabled on indices created before V_5_0_0_alpha4
+                fieldType.setHasDocValues(false);
+            }
             return new SizeFieldMapper(enabledState, fieldType, context.indexSettings());
         }
     }
 
     public static class TypeParser implements MetadataFieldMapper.TypeParser {
         @Override
-        public MetadataFieldMapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
-            Builder builder = new Builder(parserContext.mapperService().fullName(NAME), parserContext.indexVersionCreated());
+        public MetadataFieldMapper.Builder<?, ?> parse(String name, Map<String, Object> node,
+                ParserContext parserContext) throws MapperParsingException {
+            Builder builder = new Builder(parserContext.mapperService().fullName(NAME),
+                parserContext.indexVersionCreated());
             for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                 Map.Entry<String, Object> entry = iterator.next();
                 String fieldName = entry.getKey();
                 Object fieldNode = entry.getValue();
                 if (fieldName.equals("enabled")) {
-                    builder.enabled(lenientNodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED);
+                    builder.enabled(lenientNodeBooleanValue(fieldNode) ?
+                        EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED);
                     iterator.remove();
                 }
             }
@@ -116,14 +139,15 @@ public class SizeFieldMapper extends MetadataFieldMapper {
 
     private EnabledAttributeMapper enabledState;
 
-    private SizeFieldMapper(Settings indexSettings, MappedFieldType mappedFieldType) {
-        this(Defaults.ENABLED_STATE, mappedFieldType == null ? Defaults.LEGACY_SIZE_FIELD_TYPE : mappedFieldType, indexSettings);
+    private SizeFieldMapper(Settings indexSettings, MappedFieldType existing) {
+        this(Defaults.ENABLED_STATE,
+            existing == null ? defaultFieldType(Version.indexCreated(indexSettings)) : existing.clone(),
+            indexSettings);
     }
 
     private SizeFieldMapper(EnabledAttributeMapper enabled, MappedFieldType fieldType, Settings indexSettings) {
-        super(NAME, fieldType, Defaults.LEGACY_SIZE_FIELD_TYPE, indexSettings);
+        super(NAME, fieldType, defaultFieldType(Version.indexCreated(indexSettings)), indexSettings);
         this.enabledState = enabled;
-
     }
 
     @Override
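
The defaultFieldType(...) gate above yields three distinct configurations depending on the version an index was created with, which is what testBWCMapper further down verifies. An illustration only (the method is private to SizeFieldMapper, so this does not compile outside the class; the alpha5 constant is assumed from the test comment below):

    // index created before V_5_0_0_alpha2: legacy integer field, doc_values off
    MappedFieldType pre = defaultFieldType(Version.V_5_0_0_alpha1);
    assert pre instanceof LegacyNumberFieldMapper.NumberFieldType && pre.hasDocValues() == false;

    // index created in [V_5_0_0_alpha2, V_5_0_0_alpha4]: int point field, doc_values off
    MappedFieldType mid = defaultFieldType(Version.V_5_0_0_alpha4);
    assert mid instanceof NumberFieldMapper.NumberFieldType && mid.hasDocValues() == false;

    // index created after V_5_0_0_alpha4 (e.g. V_5_0_0_alpha5): int point field, doc_values on
    MappedFieldType post = defaultFieldType(Version.V_5_0_0_alpha5);
    assert post instanceof NumberFieldMapper.NumberFieldType && post.hasDocValues();
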
diff --git a/plugins/mapper-size/src/main/java/org/elasticsearch/plugin/mapper/MapperSizePlugin.java b/plugins/mapper-size/src/main/java/org/elasticsearch/plugin/mapper/MapperSizePlugin.java
index 1aecfdf444d..1df48d5695a 100644
--- a/plugins/mapper-size/src/main/java/org/elasticsearch/plugin/mapper/MapperSizePlugin.java
+++ b/plugins/mapper-size/src/main/java/org/elasticsearch/plugin/mapper/MapperSizePlugin.java
@@ -19,13 +19,18 @@
 
 package org.elasticsearch.plugin.mapper;
 
+import java.util.Collections;
+import java.util.Map;
+
+import org.elasticsearch.index.mapper.MetadataFieldMapper;
 import org.elasticsearch.index.mapper.size.SizeFieldMapper;
-import org.elasticsearch.indices.IndicesModule;
+import org.elasticsearch.plugins.MapperPlugin;
 import org.elasticsearch.plugins.Plugin;
 
-public class MapperSizePlugin extends Plugin {
+public class MapperSizePlugin extends Plugin implements MapperPlugin {
 
-    public void onModule(IndicesModule indicesModule) {
-        indicesModule.registerMetadataMapper(SizeFieldMapper.NAME, new SizeFieldMapper.TypeParser());
+    @Override
+    public Map<String, MetadataFieldMapper.TypeParser> getMetadataMappers() {
+        return Collections.singletonMap(SizeFieldMapper.NAME, new SizeFieldMapper.TypeParser());
     }
 }
diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java
index 956645aebd2..7cbce102c57 100644
--- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java
+++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java
@@ -67,7 +67,8 @@ public class SizeFieldMapperUpgradeTests extends ESIntegTestCase {
         Settings settings = Settings.builder()
             .put(Environment.PATH_DATA_SETTING.getKey(), dataPath)
             .build();
-        final String node = internalCluster().startDataOnlyNode(settings); // workaround for dangling index loading issue when node is master
+        // workaround for dangling index loading issue when node is master
+        final String node = internalCluster().startDataOnlyNode(settings);
         Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, node).nodeDataPaths();
         assertEquals(1, nodePaths.length);
         dataPath = nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER);
diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java
index b6c341c0601..279c5c96091 100644
--- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java
+++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java
@@ -49,15 +49,19 @@ public class SizeMappingIT extends ESIntegTestCase {
         String index = "foo";
         String type = "mytype";
 
-        XContentBuilder builder = jsonBuilder().startObject().startObject("_size").field("enabled", true).endObject().endObject();
+        XContentBuilder builder =
+            jsonBuilder().startObject().startObject("_size").field("enabled", true).endObject().endObject();
assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); // check mapping again assertSizeMappingEnabled(index, type, true); // update some field in the mapping - XContentBuilder updateMappingBuilder = jsonBuilder().startObject().startObject("properties").startObject("otherField").field("type", "text").endObject().endObject().endObject(); - PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping(index).setType(type).setSource(updateMappingBuilder).get(); + XContentBuilder updateMappingBuilder = + jsonBuilder().startObject().startObject("properties").startObject("otherField").field("type", "text") + .endObject().endObject().endObject(); + PutMappingResponse putMappingResponse = + client().admin().indices().preparePutMapping(index).setType(type).setSource(updateMappingBuilder).get(); assertAcked(putMappingResponse); // make sure size field is still in mapping @@ -68,15 +72,18 @@ public class SizeMappingIT extends ESIntegTestCase { String index = "foo"; String type = "mytype"; - XContentBuilder builder = jsonBuilder().startObject().startObject("_size").field("enabled", true).endObject().endObject(); + XContentBuilder builder = + jsonBuilder().startObject().startObject("_size").field("enabled", true).endObject().endObject(); assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); // check mapping again assertSizeMappingEnabled(index, type, true); // update some field in the mapping - XContentBuilder updateMappingBuilder = jsonBuilder().startObject().startObject("_size").field("enabled", false).endObject().endObject(); - PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping(index).setType(type).setSource(updateMappingBuilder).get(); + XContentBuilder updateMappingBuilder = + jsonBuilder().startObject().startObject("_size").field("enabled", false).endObject().endObject(); + PutMappingResponse putMappingResponse = + client().admin().indices().preparePutMapping(index).setType(type).setSource(updateMappingBuilder).get(); assertAcked(putMappingResponse); // make sure size field is still in mapping @@ -84,8 +91,10 @@ public class SizeMappingIT extends ESIntegTestCase { } private void assertSizeMappingEnabled(String index, String type, boolean enabled) throws IOException { - String errMsg = String.format(Locale.ROOT, "Expected size field mapping to be " + (enabled ? "enabled" : "disabled") + " for %s/%s", index, type); - GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings(index).addTypes(type).get(); + String errMsg = String.format(Locale.ROOT, + "Expected size field mapping to be " + (enabled ? 
"enabled" : "disabled") + " for %s/%s", index, type); + GetMappingsResponse getMappingsResponse = + client().admin().indices().prepareGetMappings(index).addTypes(type).get(); Map mappingSource = getMappingsResponse.getMappings().get(index).get(type).getSourceAsMap(); assertThat(errMsg, mappingSource, hasKey("_size")); String sizeAsString = mappingSource.get("_size").toString(); diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java index ce44fef59a5..8cc01aba4bb 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java @@ -19,52 +19,48 @@ package org.elasticsearch.index.mapper.size; +import java.util.Collection; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; -import org.elasticsearch.indices.IndicesModule; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.core.LegacyNumberFieldMapper; +import org.elasticsearch.index.mapper.core.NumberFieldMapper; +import org.elasticsearch.plugin.mapper.MapperSizePlugin; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; -import org.junit.Before; - +import org.elasticsearch.test.InternalSettingsPlugin; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.instanceOf; import org.apache.lucene.index.IndexableField; public class SizeMappingTests extends ESSingleNodeTestCase { - - IndexService indexService; - MapperService mapperService; - DocumentMapperParser parser; - - @Before - public void before() { - indexService = createIndex("test"); - IndicesModule indices = new IndicesModule(new NamedWriteableRegistry()); - indices.registerMetadataMapper(SizeFieldMapper.NAME, new SizeFieldMapper.TypeParser()); - mapperService = new MapperService(indexService.getIndexSettings(), indexService.analysisService(), indexService.similarityService(), indices.getMapperRegistry(), indexService::newQueryShardContext); - parser = mapperService.documentMapperParser(); + @Override + protected Collection> getPlugins() { + return pluginList(MapperSizePlugin.class, InternalSettingsPlugin.class); } public void testSizeEnabled() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_size").field("enabled", true).endObject() - .endObject().endObject().string(); - DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); + IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=true"); + DocumentMapper docMapper = 
service.mapperService().documentMapper("type"); BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); + .startObject() + .field("field", "value") + .endObject() + .bytes(); ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source)); boolean stored = false; @@ -78,47 +74,82 @@ public class SizeMappingTests extends ESSingleNodeTestCase { } public void testSizeDisabled() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_size").field("enabled", false).endObject() - .endObject().endObject().string(); - DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); + IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=false"); + DocumentMapper docMapper = service.mapperService().documentMapper("type"); BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); + .startObject() + .field("field", "value") + .endObject() + .bytes(); ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source)); assertThat(doc.rootDoc().getField("_size"), nullValue()); } public void testSizeNotSet() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); - DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); + IndexService service = createIndex("test", Settings.EMPTY, "type"); + DocumentMapper docMapper = service.mapperService().documentMapper("type"); BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); + .startObject() + .field("field", "value") + .endObject() + .bytes(); ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source)); assertThat(doc.rootDoc().getField("_size"), nullValue()); } public void testThatDisablingWorksWhenMerging() throws Exception { - String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_size").field("enabled", true).endObject() - .endObject().endObject().string(); - DocumentMapper enabledMapper = mapperService.merge("type", new CompressedXContent(enabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); + IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=true"); + DocumentMapper docMapper = service.mapperService().documentMapper("type"); + assertThat(docMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(true)); String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_size").field("enabled", false).endObject() - .endObject().endObject().string(); - DocumentMapper disabledMapper = mapperService.merge("type", new CompressedXContent(disabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); + .startObject("_size").field("enabled", false).endObject() + .endObject().endObject().string(); + docMapper = service.mapperService().merge("type", new CompressedXContent(disabledMapping), + MapperService.MergeReason.MAPPING_UPDATE, false); - assertThat(disabledMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(false)); + assertThat(docMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(false)); } + + public void testBWCMapper() throws Exception { + { + // IntPoint && docvalues=true for V_5_0_0_alpha5 + IndexService 
service = createIndex("foo", Settings.EMPTY, "bar", "_size", "enabled=true"); + DocumentMapper docMapper = service.mapperService().documentMapper("bar"); + SizeFieldMapper mapper = docMapper.metadataMapper(SizeFieldMapper.class); + assertThat(mapper.enabled(), is(true)); + MappedFieldType ft = mapper.fieldType(); + assertThat(ft.hasDocValues(), is(true)); + assertThat(mapper.fieldType(), instanceOf(NumberFieldMapper.NumberFieldType.class)); + } + + { + // IntPoint with docvalues=false if version > V_5_0_0_alpha2 && version < V_5_0_0_beta1 + IndexService service = createIndex("foo2", + Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0_alpha4.id).build(), + "bar", "_size", "enabled=true"); + DocumentMapper docMapper = service.mapperService().documentMapper("bar"); + SizeFieldMapper mapper = docMapper.metadataMapper(SizeFieldMapper.class); + assertThat(mapper.enabled(), is(true)); + assertThat(mapper.fieldType().hasDocValues(), is(false)); + assertThat(mapper.fieldType(), instanceOf(NumberFieldMapper.NumberFieldType.class)); + } + + { + // LegacyIntField with docvalues=false if version < V_5_0_0_alpha2 + IndexService service = createIndex("foo3", + Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0_alpha1.id).build(), + "bar", "_size", "enabled=true"); + DocumentMapper docMapper = service.mapperService().documentMapper("bar"); + SizeFieldMapper mapper = docMapper.metadataMapper(SizeFieldMapper.class); + assertThat(mapper.enabled(), is(true)); + assertThat(mapper.fieldType().hasDocValues(), is(false)); + assertThat(mapper.fieldType(), instanceOf(LegacyNumberFieldMapper.NumberFieldType.class)); + } + } + } diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index d3ef723cb20..125b7bdd6c5 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -40,3 +40,11 @@ thirdPartyAudit.excludes = [ 'org.slf4j.Logger', 'org.slf4j.LoggerFactory', ] + +integTest { + cluster { + setting 'cloud.azure.storage.my_account_test.account', 'cloudazureresource' + setting 'cloud.azure.storage.my_account_test.key', 'abcdefgh' + setting 'script.stored', 'true' + } +} \ No newline at end of file diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java index 2809b8588f1..64193e0b2f9 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.repositories.RepositoryName; import org.elasticsearch.repositories.RepositorySettings; +import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URISyntaxException; @@ -56,7 +57,8 @@ public class AzureBlobStore extends AbstractComponent implements BlobStore { public AzureBlobStore(RepositoryName name, Settings settings, RepositorySettings repositorySettings, AzureStorageService client) throws URISyntaxException, StorageException { super(settings); - this.client = client.start(); + this.client = client; + client.start(); this.container = getValue(repositorySettings, Repository.CONTAINER_SETTING, Storage.CONTAINER_SETTING); this.repositoryName = name.getName(); this.accountName = getValue(repositorySettings, 
Repository.ACCOUNT_SETTING, Storage.ACCOUNT_SETTING); @@ -127,7 +129,7 @@ public class AzureBlobStore extends AbstractComponent implements BlobStore { this.client.deleteBlob(this.accountName, this.locMode, container, blob); } - public InputStream getInputStream(String container, String blob) throws URISyntaxException, StorageException + public InputStream getInputStream(String container, String blob) throws URISyntaxException, StorageException, IOException { return this.client.getInputStream(this.accountName, this.locMode, container, blob); } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java index b6f5f12b266..46972482a6a 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java @@ -25,9 +25,11 @@ import com.microsoft.azure.storage.StorageException; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; +import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URISyntaxException; @@ -42,6 +44,9 @@ public interface AzureStorageService { final class Storage { public static final String PREFIX = "cloud.azure.storage."; + + public static final Setting STORAGE_ACCOUNTS = Setting.groupSetting(Storage.PREFIX, Setting.Property.NodeScope); + public static final Setting TIMEOUT_SETTING = Setting.timeSetting("cloud.azure.storage.timeout", TimeValue.timeValueMinutes(-1), Property.NodeScope); public static final Setting ACCOUNT_SETTING = @@ -71,7 +76,7 @@ public interface AzureStorageService { void deleteBlob(String account, LocationMode mode, String container, String blob) throws URISyntaxException, StorageException; InputStream getInputStream(String account, LocationMode mode, String container, String blob) - throws URISyntaxException, StorageException; + throws URISyntaxException, StorageException, IOException; OutputStream getOutputStream(String account, LocationMode mode, String container, String blob) throws URISyntaxException, StorageException; @@ -82,5 +87,5 @@ public interface AzureStorageService { void moveBlob(String account, LocationMode mode, String container, String sourceBlob, String targetBlob) throws URISyntaxException, StorageException; - AzureStorageService start(); + void start(); } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java index 09dbc6520a1..80e62b90ad3 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java @@ -45,7 +45,7 @@ import java.net.URISyntaxException; import java.util.HashMap; import java.util.Map; -public class AzureStorageServiceImpl extends AbstractLifecycleComponent +public class AzureStorageServiceImpl extends AbstractLifecycleComponent implements AzureStorageService { final 
AzureStorageSettings primaryStorageSettings; diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java index 281ef79cb27..f64ffed4a03 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java @@ -112,9 +112,8 @@ public final class AzureStorageSettings { } private static List createStorageSettings(Settings settings) { - Setting storageGroupSetting = Setting.groupSetting(Storage.PREFIX, Setting.Property.NodeScope); // ignore global timeout which has the same prefix but does not belong to any group - Settings groups = storageGroupSetting.get(settings.filter((k) -> k.equals(Storage.TIMEOUT_SETTING.getKey()) == false)); + Settings groups = Storage.STORAGE_ACCOUNTS.get(settings.filter((k) -> k.equals(Storage.TIMEOUT_SETTING.getKey()) == false)); List storageSettings = new ArrayList<>(); for (String groupName : groups.getAsGroups().keySet()) { storageSettings.add( diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java b/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java index ce122353f51..b04b613df21 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoriesModule; @@ -62,13 +61,13 @@ public class AzureRepositoryPlugin extends Plugin { @Override public List> getSettings() { - return Arrays.asList(AzureStorageService.Storage.ACCOUNT_SETTING, - AzureStorageService.Storage.COMPRESS_SETTING, - AzureStorageService.Storage.CONTAINER_SETTING, - AzureStorageService.Storage.BASE_PATH_SETTING, - AzureStorageService.Storage.CHUNK_SIZE_SETTING, - AzureStorageService.Storage.LOCATION_MODE_SETTING); - + return Arrays.asList(AzureStorageService.Storage.STORAGE_ACCOUNTS, + AzureStorageService.Storage.ACCOUNT_SETTING, + AzureStorageService.Storage.COMPRESS_SETTING, + AzureStorageService.Storage.CONTAINER_SETTING, + AzureStorageService.Storage.BASE_PATH_SETTING, + AzureStorageService.Storage.CHUNK_SIZE_SETTING, + AzureStorageService.Storage.LOCATION_MODE_SETTING); } @Override diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java index 4d3459cdcd4..1333c755e7c 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java @@ -65,7 +65,7 @@ public class AzureRepository extends BlobStoreRepository { private static final ByteSizeValue MAX_CHUNK_SIZE = new 
ByteSizeValue(64, ByteSizeUnit.MB); - public final static String TYPE = "azure"; + public static final String TYPE = "azure"; public static final class Repository { public static final Setting ACCOUNT_SETTING = Setting.simpleString("account", Property.NodeScope); diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java index 8160c560325..21df62cd768 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java @@ -31,6 +31,8 @@ import org.elasticsearch.common.settings.Settings; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; +import java.io.FileNotFoundException; +import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URISyntaxException; @@ -41,7 +43,7 @@ import java.util.concurrent.ConcurrentHashMap; /** * In memory storage for unit tests */ -public class AzureStorageServiceMock extends AbstractLifecycleComponent +public class AzureStorageServiceMock extends AbstractLifecycleComponent implements AzureStorageService { protected Map blobs = new ConcurrentHashMap<>(); @@ -79,7 +81,10 @@ public class AzureStorageServiceMock extends AbstractLifecycleComponent APPLICATION_NAME = new Setting<>("application_name", GoogleCloudStoragePlugin.NAME, Function.identity(), Property.NodeScope, Property.Dynamic); public static final Setting SERVICE_ACCOUNT = - simpleString("service_account", Property.NodeScope, Property.Dynamic, Property.Filtered); + simpleString("service_account", Property.NodeScope, Property.Dynamic); public static final Setting HTTP_READ_TIMEOUT = timeSetting("http.read_timeout", NO_TIMEOUT, Property.NodeScope, Property.Dynamic); public static final Setting HTTP_CONNECT_TIMEOUT = diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java index 6ba726e2b24..a22178315f7 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java @@ -131,7 +131,7 @@ final class HdfsBlobContainer extends AbstractBlobContainer { } @Override - public Map listBlobsByPrefix(final @Nullable String prefix) throws IOException { + public Map listBlobsByPrefix(@Nullable final String prefix) throws IOException { FileStatus[] files = store.execute(new Operation() { @Override public FileStatus[] run(FileContext fileContext) throws IOException { diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java index 15cd55c823d..752a2abc3ce 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java @@ -31,7 +31,7 @@ import java.util.function.Function; /** * */ -public interface AwsS3Service extends LifecycleComponent { +public interface AwsS3Service extends LifecycleComponent { // Global AWS settings (shared between discovery-ec2 and repository-s3) // Each setting starting with 
`cloud.aws` also exists in discovery-ec2 project. Don't forget to update diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java index 352aa196b06..1a0c2992b15 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java @@ -44,7 +44,7 @@ import java.util.Map; /** * */ -public class InternalAwsS3Service extends AbstractLifecycleComponent implements AwsS3Service { +public class InternalAwsS3Service extends AbstractLifecycleComponent implements AwsS3Service { /** * (accesskey, endpoint) -> client @@ -155,6 +155,8 @@ public class InternalAwsS3Service extends AbstractLifecycleComponent { + try { + blobStore.client().getObjectMetadata(blobStore.bucket(), buildKey(blobName)); + return true; + } catch (AmazonS3Exception e) { + return false; + } + }); } catch (AmazonS3Exception e) { return false; - } catch (Throwable e) { + } catch (Exception e) { throw new BlobStoreException("failed to check if blob exists", e); } } @@ -176,4 +183,19 @@ public class S3BlobContainer extends AbstractBlobContainer { return keyPath + blobName; } + /** + * Executes a {@link PrivilegedExceptionAction} with privileges enabled. + */ + T doPrivileged(PrivilegedExceptionAction operation) throws IOException { + SecurityManager sm = System.getSecurityManager(); + if (sm != null) { + sm.checkPermission(new SpecialPermission()); + } + try { + return AccessController.doPrivileged(operation); + } catch (PrivilegedActionException e) { + throw (IOException) e.getException(); + } + } } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index fe184a06aef..e3b7e8296e0 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -57,7 +57,7 @@ import java.util.function.Function; */ public class S3Repository extends BlobStoreRepository { - public final static String TYPE = "s3"; + public static final String TYPE = "s3"; /** * Global S3 repositories settings. Starting with: repositories.s3 diff --git a/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy index e5f26c3e9d1..1f09cada2e5 100644 --- a/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy @@ -22,4 +22,16 @@ grant { // TODO: get these fixed in aws sdk permission java.lang.RuntimePermission "accessDeclaredMembers"; permission java.lang.RuntimePermission "getClassLoader"; + // Needed because of problems in AmazonS3Client: + // When no region is set on an AmazonS3Client instance, the + // AWS SDK loads all known partitions from a JSON file and + // uses Jackson's ObjectMapper for that: this one, in + // version 2.5.3 with the default binding options, tries + // to suppress access checks of ctor/field/method and thus + // requires this special permission. AWS must be fixed to + // use Jackson correctly and have the correct modifiers + // on bound classes. 
+ // TODO: get these fixed in aws sdk + // See https://github.com/aws/aws-sdk-java/issues/766 + permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; }; diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java index 2e196610c1a..d1c43f15adb 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java @@ -52,7 +52,7 @@ import static org.hamcrest.Matchers.notNullValue; /** */ @ClusterScope(scope = Scope.SUITE, numDataNodes = 2, numClientNodes = 0, transportClientRatio = 0.0) -abstract public class AbstractS3SnapshotRestoreTest extends AbstractAwsTestCase { +public abstract class AbstractS3SnapshotRestoreTest extends AbstractAwsTestCase { @Override public Settings nodeSettings(int nodeOrdinal) { @@ -512,7 +512,7 @@ abstract public class AbstractS3SnapshotRestoreTest extends AbstractAwsTestCase multiObjectDeleteRequest.setKeys(keys); client.deleteObjects(multiObjectDeleteRequest); } - } catch (Throwable ex) { + } catch (Exception ex) { logger.warn("Failed to delete S3 repository [{}] in [{}]", ex, bucketName, region); } } diff --git a/plugins/store-smb/LICENSE.txt b/plugins/store-smb/LICENSE.txt deleted file mode 100644 index d6456956733..00000000000 --- a/plugins/store-smb/LICENSE.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. 
For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
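For context on the S3 change above: `blobExists` now runs the SDK call inside the new `doPrivileged` helper, so the plugin's own policy grants (such as the `ReflectPermission` added to plugin-security.policy) apply no matter which caller is on the stack. A minimal sketch of the pattern, with a hypothetical `FakeS3Client` standing in for the real AmazonS3 API:

```java
// Sketch of the privileged-call pattern from the S3BlobContainer change.
// FakeS3Client and headObject are hypothetical stand-ins; only the
// SecurityManager/AccessController wiring mirrors the diff.
import java.io.IOException;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;

final class PrivilegedSketch {

    interface FakeS3Client {
        boolean headObject(String bucket, String key) throws IOException;
    }

    static <T> T doPrivileged(PrivilegedExceptionAction<T> operation) throws IOException {
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            // Elasticsearch checks its own SpecialPermission here so that
            // untrusted callers cannot reach the privileged block; a stock
            // permission stands in for it in this sketch.
            sm.checkPermission(new RuntimePermission("accessDeclaredMembers"));
        }
        try {
            return AccessController.doPrivileged(operation);
        } catch (PrivilegedActionException e) {
            // Safe only because the actions used here throw IOException alone.
            throw (IOException) e.getException();
        }
    }

    static boolean blobExists(FakeS3Client client, String bucket, String key) throws IOException {
        // The SDK call runs with the plugin's policy grants, not the caller's.
        return doPrivileged(() -> client.headObject(bucket, key));
    }
}
```

The unwrap cast in the catch block is sound only while the helper is used exclusively with actions whose checked exceptions are limited to `IOException`, which matches how the diff uses it.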
diff --git a/plugins/store-smb/src/test/java/org/elasticsearch/index/store/AbstractAzureFsTestCase.java b/plugins/store-smb/src/test/java/org/elasticsearch/index/store/AbstractAzureFsTestCase.java index 9e29d6f091a..6016b0ca335 100644 --- a/plugins/store-smb/src/test/java/org/elasticsearch/index/store/AbstractAzureFsTestCase.java +++ b/plugins/store-smb/src/test/java/org/elasticsearch/index/store/AbstractAzureFsTestCase.java @@ -28,7 +28,7 @@ import java.util.Collection; import static org.hamcrest.Matchers.is; -abstract public class AbstractAzureFsTestCase extends ESIntegTestCase { +public abstract class AbstractAzureFsTestCase extends ESIntegTestCase { @Override protected Collection> nodePlugins() { return pluginList(SMBStorePlugin.class); diff --git a/qa/backwards-5.0/build.gradle b/qa/backwards-5.0/build.gradle index fbce12f8126..657a6b7c078 100644 --- a/qa/backwards-5.0/build.gradle +++ b/qa/backwards-5.0/build.gradle @@ -18,6 +18,6 @@ integTest { cluster { numNodes = 2 numBwcNodes = 1 - bwcVersion = "5.0.0-alpha4-SNAPSHOT" // this is the same as the current version until we release the first RC + bwcVersion = "5.0.0-alpha5-SNAPSHOT" // this is the same as the current version until we release the first RC } } diff --git a/qa/evil-tests/build.gradle b/qa/evil-tests/build.gradle index 53406f1aad9..cba9334fbca 100644 --- a/qa/evil-tests/build.gradle +++ b/qa/evil-tests/build.gradle @@ -26,7 +26,7 @@ apply plugin: 'elasticsearch.standalone-test' dependencies { - testCompile 'com.google.jimfs:jimfs:1.0' + testCompile 'com.google.jimfs:jimfs:1.1' } // TODO: give each evil test its own fresh JVM for more isolation. diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/SeccompTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/SeccompTests.java index a319aaabb70..d028dfd573a 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/SeccompTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/SeccompTests.java @@ -40,7 +40,7 @@ public class SeccompTests extends ESTestCase { if (!JNANatives.LOCAL_SECCOMP_ALL) { try { Seccomp.init(createTempDir()); - } catch (Throwable e) { + } catch (Exception e) { throw new RuntimeException("unable to forcefully apply seccomp to test thread", e); } } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java index 2b9b6ec6ab9..af1f311dd23 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java @@ -25,7 +25,8 @@ import com.google.common.jimfs.Jimfs; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.Version; import org.elasticsearch.cli.MockTerminal; -import org.elasticsearch.cli.UserError; +import org.elasticsearch.cli.Terminal; +import org.elasticsearch.cli.UserException; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.PathUtils; @@ -70,6 +71,7 @@ import java.util.zip.ZipOutputStream; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.not; @LuceneTestCase.SuppressFileSystems("*") public class InstallPluginCommandTests extends ESTestCase { @@ -179,6 +181,10 @@ public class 
InstallPluginCommandTests extends ESTestCase { /** creates a plugin .zip and returns the url for testing */ static String createPlugin(String name, Path structure) throws IOException { + return createPlugin(name, structure, false); + } + + static String createPlugin(String name, Path structure, boolean createSecurityPolicyFile) throws IOException { PluginTestUtil.writeProperties(structure, "description", "fake desc", "name", name, @@ -186,6 +192,10 @@ public class InstallPluginCommandTests extends ESTestCase { "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), "classname", "FakePlugin"); + if (createSecurityPolicyFile) { + String securityPolicyContent = "grant {\n permission java.lang.RuntimePermission \"setFactory\";\n};\n"; + Files.write(structure.resolve("plugin-security.policy"), securityPolicyContent.getBytes(StandardCharsets.UTF_8)); + } writeJar(structure.resolve("plugin.jar"), "FakePlugin"); return writeZip(structure, "elasticsearch"); } @@ -310,7 +320,7 @@ public class InstallPluginCommandTests extends ESTestCase { public void testUnknownPlugin() throws Exception { Tuple env = createEnv(fs, temp); - UserError e = expectThrows(UserError.class, () -> installPlugin("foo", env.v1())); + UserException e = expectThrows(UserException.class, () -> installPlugin("foo", env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("Unknown plugin foo")); } @@ -340,7 +350,7 @@ public class InstallPluginCommandTests extends ESTestCase { Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("lang-groovy", pluginDir); - UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); + UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("is a system module")); assertInstallCleaned(env.v2()); } @@ -375,7 +385,7 @@ public class InstallPluginCommandTests extends ESTestCase { Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("fake", pluginDir); installPlugin(pluginZip, env.v1()); - UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); + UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("already exists")); assertInstallCleaned(env.v2()); } @@ -397,7 +407,7 @@ public class InstallPluginCommandTests extends ESTestCase { Path binDir = pluginDir.resolve("bin"); Files.createFile(binDir); String pluginZip = createPlugin("fake", pluginDir); - UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); + UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); assertInstallCleaned(env.v2()); } @@ -409,7 +419,7 @@ public class InstallPluginCommandTests extends ESTestCase { Files.createDirectories(dirInBinDir); Files.createFile(dirInBinDir.resolve("somescript")); String pluginZip = createPlugin("fake", pluginDir); - UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); + UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("Directories not allowed in bin dir for plugin")); assertInstallCleaned(env.v2()); } @@ -480,7 +490,7 @@ public class InstallPluginCommandTests extends 
ESTestCase { Path configDir = pluginDir.resolve("config"); Files.createFile(configDir); String pluginZip = createPlugin("fake", pluginDir); - UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); + UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); assertInstallCleaned(env.v2()); } @@ -492,7 +502,7 @@ public class InstallPluginCommandTests extends ESTestCase { Files.createDirectories(dirInConfigDir); Files.createFile(dirInConfigDir.resolve("myconfig.yml")); String pluginZip = createPlugin("fake", pluginDir); - UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); + UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("Directories not allowed in config dir for plugin")); assertInstallCleaned(env.v2()); } @@ -524,7 +534,7 @@ public class InstallPluginCommandTests extends ESTestCase { Path pluginDir = createPluginDir(temp); Files.createFile(pluginDir.resolve(PluginInfo.ES_PLUGIN_PROPERTIES)); String pluginZip = writeZip(pluginDir, null); - UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); + UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("`elasticsearch` directory is missing in the plugin zip")); assertInstallCleaned(env.v2()); } @@ -570,20 +580,54 @@ public class InstallPluginCommandTests extends ESTestCase { public void testInstallMisspelledOfficialPlugins() throws Exception { Tuple env = createEnv(fs, temp); - UserError e = expectThrows(UserError.class, () -> installPlugin("xpack", env.v1())); + UserException e = expectThrows(UserException.class, () -> installPlugin("xpack", env.v1())); assertThat(e.getMessage(), containsString("Unknown plugin xpack, did you mean [x-pack]?")); - e = expectThrows(UserError.class, () -> installPlugin("analysis-smartnc", env.v1())); + e = expectThrows(UserException.class, () -> installPlugin("analysis-smartnc", env.v1())); assertThat(e.getMessage(), containsString("Unknown plugin analysis-smartnc, did you mean [analysis-smartcn]?")); - e = expectThrows(UserError.class, () -> installPlugin("repository", env.v1())); + e = expectThrows(UserException.class, () -> installPlugin("repository", env.v1())); assertThat(e.getMessage(), containsString("Unknown plugin repository, did you mean any of [repository-s3, repository-gcs]?")); - e = expectThrows(UserError.class, () -> installPlugin("unknown_plugin", env.v1())); + e = expectThrows(UserException.class, () -> installPlugin("unknown_plugin", env.v1())); assertThat(e.getMessage(), containsString("Unknown plugin unknown_plugin")); } - // TODO: test batch flag? 
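The TODO removed here is covered by the new tests that follow. Like the rest of this suite, they assert failures via `expectThrows`, which returns the caught exception so the caller can inspect its message. A simplified stand-in for the framework's helper, assuming only plain Java:

```java
// Simplified stand-in for the expectThrows helper the tests rely on:
// run the action, demand that it throws the expected type, and hand the
// exception back so the caller can assert on its message.
final class ExpectThrows {

    @FunctionalInterface
    interface ThrowingRunnable {
        void run() throws Throwable;
    }

    static <T extends Throwable> T expectThrows(Class<T> expected, ThrowingRunnable action) {
        try {
            action.run();
        } catch (Throwable t) {
            if (expected.isInstance(t)) {
                return expected.cast(t);
            }
            throw new AssertionError("unexpected exception type: " + t.getClass().getName(), t);
        }
        throw new AssertionError("expected " + expected.getName() + " but nothing was thrown");
    }

    public static void main(String[] args) {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> { throw new IllegalArgumentException("Unknown plugin foo"); });
        // Mirrors the pattern above: assert on the message of the returned exception.
        assert e.getMessage().contains("Unknown plugin foo");
    }
}
```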
+ public void testBatchFlag() throws Exception { + MockTerminal terminal = new MockTerminal(); + installPlugin(terminal, true); + assertThat(terminal.getOutput(), containsString("WARNING: plugin requires additional permissions")); + } + + public void testQuietFlagDisabled() throws Exception { + MockTerminal terminal = new MockTerminal(); + terminal.setVerbosity(randomFrom(Terminal.Verbosity.NORMAL, Terminal.Verbosity.VERBOSE)); + installPlugin(terminal, false); + assertThat(terminal.getOutput(), containsString("100%")); + } + + public void testQuietFlagEnabled() throws Exception { + MockTerminal terminal = new MockTerminal(); + terminal.setVerbosity(Terminal.Verbosity.SILENT); + installPlugin(terminal, false); + assertThat(terminal.getOutput(), not(containsString("100%"))); + } + + private void installPlugin(MockTerminal terminal, boolean isBatch) throws Exception { + Tuple env = createEnv(fs, temp); + Path pluginDir = createPluginDir(temp); + // if batch is enabled, we also want to add a security policy + String pluginZip = createPlugin("fake", pluginDir, isBatch); + + Map settings = new HashMap<>(); + settings.put("path.home", env.v1().toString()); + new InstallPluginCommand() { + @Override + void jarHellCheck(Path candidate, Path pluginsDir) throws Exception { + } + }.execute(terminal, pluginZip, isBatch, settings); + } + // TODO: test checksum (need maven/official below) // TODO: test maven, official, and staging install...need tests with fixtures... } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java index 3a4639fa839..e2910be64f0 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java @@ -27,7 +27,7 @@ import java.util.HashMap; import java.util.Map; import org.apache.lucene.util.LuceneTestCase; -import org.elasticsearch.cli.UserError; +import org.elasticsearch.cli.UserException; import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -72,7 +72,7 @@ public class RemovePluginCommandTests extends ESTestCase { } public void testMissing() throws Exception { - UserError e = expectThrows(UserError.class, () -> removePlugin("dne", home)); + UserException e = expectThrows(UserException.class, () -> removePlugin("dne", home)); assertTrue(e.getMessage(), e.getMessage().contains("plugin dne not found")); assertRemoveCleaned(env); } @@ -102,7 +102,7 @@ public class RemovePluginCommandTests extends ESTestCase { public void testBinNotDir() throws Exception { Files.createDirectories(env.pluginsFile().resolve("elasticsearch")); - UserError e = expectThrows(UserError.class, () -> removePlugin("elasticsearch", home)); + UserException e = expectThrows(UserException.class, () -> removePlugin("elasticsearch", home)); assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); assertTrue(Files.exists(env.pluginsFile().resolve("elasticsearch"))); // did not remove assertTrue(Files.exists(env.binFile().resolve("elasticsearch"))); diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java index 4199a5d67cd..e7b5d1c4501 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java +++ 
b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java @@ -23,10 +23,10 @@ import org.apache.lucene.util.IOUtils; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodeService; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESTestCase; @@ -65,14 +65,14 @@ public class TribeUnitTests extends ESTestCase { .put(baseSettings) .put("cluster.name", "tribe1") .put("node.name", "tribe1_node") - .put(DiscoveryNodeService.NODE_ID_SEED_SETTING.getKey(), random().nextLong()) + .put(NodeEnvironment.NODE_ID_SEED_SETTING.getKey(), random().nextLong()) .build()).start(); tribe2 = new TribeClientNode( Settings.builder() .put(baseSettings) .put("cluster.name", "tribe2") .put("node.name", "tribe2_node") - .put(DiscoveryNodeService.NODE_ID_SEED_SETTING.getKey(), random().nextLong()) + .put(NodeEnvironment.NODE_ID_SEED_SETTING.getKey(), random().nextLong()) .build()).start(); } diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/tribe/elasticsearch.yml b/qa/evil-tests/src/test/resources/org/elasticsearch/tribe/elasticsearch.yml index d4fa8d8d130..19b2a7b5dd9 100644 --- a/qa/evil-tests/src/test/resources/org/elasticsearch/tribe/elasticsearch.yml +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/tribe/elasticsearch.yml @@ -1,5 +1,5 @@ cluster.name: tribe_node_cluster tribe.t1.cluster.name: tribe1 tribe.t2.cluster.name: tribe2 -tribe.t1.node_id.seed: 1 -tribe.t2.node_id.seed: 2 +tribe.t1.node.id.seed: 1 +tribe.t2.node.id.seed: 2 diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle index 58e7e50e421..878e9c5ef7f 100644 --- a/qa/vagrant/build.gradle +++ b/qa/vagrant/build.gradle @@ -40,8 +40,9 @@ List availableBoxes = [ 'ubuntu-1504' ] +String vagrantBoxes = getProperties().get('vagrant.boxes', 'sample') List boxes = [] -for (String box : getProperties().get('vagrant.boxes', 'sample').split(',')) { +for (String box : vagrantBoxes.split(',')) { if (box == 'sample') { boxes.add('centos-7') boxes.add('ubuntu-1404') @@ -56,9 +57,33 @@ for (String box : getProperties().get('vagrant.boxes', 'sample').split(',')) { } } -/* The version of elasticsearch that we upgrade *from* as part of testing - * upgrades. 
*/ -String upgradeFromVersion = '2.3.3' +long seed +String formattedSeed = null +String[] upgradeFromVersions +String upgradeFromVersion + +String maybeTestsSeed = System.getProperty("tests.seed", null); +if (maybeTestsSeed != null) { + List seeds = maybeTestsSeed.tokenize(':') + if (seeds.size() != 0) { + String masterSeed = seeds.get(0) + seed = new BigInteger(masterSeed, 16).longValue() + formattedSeed = maybeTestsSeed + } +} +if (formattedSeed == null) { + seed = new Random().nextLong() + formattedSeed = String.format("%016X", seed) +} + +String maybeUpgradeFromVersions = System.getProperty("tests.packaging.upgrade.from.versions", null) +if (maybeUpgradeFromVersions != null) { + upgradeFromVersions = maybeUpgradeFromVersions.split(",") +} else { + upgradeFromVersions = new File(project.projectDir, 'versions').readLines('UTF-8') +} + +upgradeFromVersion = upgradeFromVersions[new Random(seed).nextInt(upgradeFromVersions.length)] configurations { test @@ -95,8 +120,37 @@ task stop { description 'Stop any tasks from tests that still may be running' } +Set getVersions() { + Node xml + new URL('http://repo1.maven.org/maven2/org/elasticsearch/elasticsearch/maven-metadata.xml').openStream().withStream { s -> + xml = new XmlParser().parse(s) + } + return new TreeSet<>(xml.versioning.versions.version.collect { it.text() }.findAll { it ==~ /2\.\d\.\d/ }) +} + +task updatePackagingTestUpgradeFromVersions { + doLast { + Set versions = getVersions() + new File(project.projectDir, 'versions').text = versions.join('\n') + '\n' + } +} + +task verifyPackagingTestUpgradeFromVersions { + doLast { + String maybeUpdateFromVersions = System.getProperty("tests.packaging.upgrade.from.versions", null) + if (maybeUpdateFromVersions == null) { + Set versions = getVersions() + Set actualVersions = new HashSet<>(Arrays.asList(upgradeFromVersions)) + if (!versions.equals(actualVersions)) { + throw new GradleException("out-of-date versions [" + actualVersions + "], expected [" + versions + "]; run gradle updatePackagingTestUpgradeFromVersions") + } + } + } +} + File testRoot = new File("$buildDir/testroot") task createTestRoot { + dependsOn verifyPackagingTestUpgradeFromVersions outputs.dir testRoot doLast { testRoot.mkdirs() @@ -121,6 +175,16 @@ task prepareTestRoot(type: Copy) { from configurations.test dependsOn createVersionFile, createUpgradeFromFile + doFirst { + gradle.addBuildListener new BuildAdapter() { + @Override + void buildFinished(BuildResult result) { + if (result.failure) { + println "Reproduce with: gradle packagingTest -Pvagrant.boxes=${vagrantBoxes} -Dtests.seed=${formattedSeed} -Dtests.packaging.upgrade.from.versions=${upgradeFromVersions.join(",")}" + } + } + } + } } task checkVagrantVersion(type: Exec) { @@ -164,6 +228,12 @@ for (String box : availableBoxes) { continue; } + Task update = tasks.create("vagrant${boxTask}#update", VagrantCommandTask) { + boxName box + args 'box', 'update', box + dependsOn checkVagrantVersion + } + Task up = tasks.create("vagrant${boxTask}#up", VagrantCommandTask) { boxName box /* It's important that we try to reprovision the box even if it already @@ -179,7 +249,7 @@ for (String box : availableBoxes) { args 'up', box, '--provision', '--provider', 'virtualbox' /* It'd be possible to check if the box is already up here and output SKIPPED but that would require running vagrant status which is slow! 
*/ - dependsOn checkVagrantVersion + dependsOn update } Task smoke = tasks.create("vagrant${boxTask}#smoketest", Exec) { @@ -188,7 +258,7 @@ for (String box : availableBoxes) { commandLine 'vagrant', 'ssh', box, '--command', "set -o pipefail && ${smokeTestCommand} | sed -ue 's/^/ ${box}: /'" } - vagrantSmokeTest.dependsOn(smoke) + vagrantSmokeTest.dependsOn(smoke) Task packaging = tasks.create("packagingTest${boxTask}", BatsOverVagrantTask) { dependsOn up @@ -199,4 +269,3 @@ for (String box : availableBoxes) { } packagingTest.dependsOn(packaging) } - diff --git a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash index c81f84cb778..eac0a80e4f4 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash @@ -209,8 +209,8 @@ fi install_and_check_plugin discovery gce google-api-client-*.jar } -@test "[$GROUP] install discovery-azure plugin" { - install_and_check_plugin discovery azure azure-core-*.jar +@test "[$GROUP] install discovery-azure-classic plugin" { + install_and_check_plugin discovery azure-classic azure-core-*.jar } @test "[$GROUP] install discovery-ec2 plugin" { @@ -228,6 +228,10 @@ fi install_and_check_plugin ingest geoip geoip2-*.jar jackson-annotations-*.jar jackson-databind-*.jar maxmind-db-*.jar } +@test "[$GROUP] install ingest-useragent plugin" { + install_and_check_plugin ingest useragent +} + @test "[$GROUP] check ingest-common module" { check_module ingest-common jcodings-*.jar joni-*.jar } @@ -341,8 +345,8 @@ fi remove_plugin discovery-gce } -@test "[$GROUP] remove discovery-azure plugin" { - remove_plugin discovery-azure +@test "[$GROUP] remove discovery-azure-classic plugin" { + remove_plugin discovery-azure-classic } @test "[$GROUP] remove discovery-ec2 plugin" { @@ -357,6 +361,10 @@ fi remove_plugin ingest-geoip } +@test "[$GROUP] remove ingest-useragent plugin" { + remove_plugin ingest-useragent +} + @test "[$GROUP] remove javascript plugin" { remove_plugin lang-javascript } @@ -418,17 +426,18 @@ fi @test "[$GROUP] install jvm-example with different logging modes and check output" { local relativePath=${1:-$(readlink -m jvm-example-*.zip)} sudo -E -u $ESPLUGIN_COMMAND_USER "$ESHOME/bin/elasticsearch-plugin" install "file://$relativePath" > /tmp/plugin-cli-output - local loglines=$(cat /tmp/plugin-cli-output | wc -l) + # exclude progress line + local loglines=$(cat /tmp/plugin-cli-output | grep -v "^[[:cntrl:]]" | wc -l) if [ "$GROUP" == "TAR PLUGINS" ]; then # tar extraction does not create the plugins directory so the plugin tool will print an additional line that the directory will be created [ "$loglines" -eq "3" ] || { - echo "Expected 3 lines but the output was:" + echo "Expected 3 lines excluding progress bar but the output had $loglines lines and was:" cat /tmp/plugin-cli-output false } else [ "$loglines" -eq "2" ] || { - echo "Expected 2 lines but the output was:" + echo "Expected 2 lines excluding progress bar but the output had $loglines lines and was:" cat /tmp/plugin-cli-output false } @@ -437,16 +446,16 @@ fi local relativePath=${1:-$(readlink -m jvm-example-*.zip)} sudo -E -u $ESPLUGIN_COMMAND_USER ES_JAVA_OPTS="-Des.logger.level=DEBUG" "$ESHOME/bin/elasticsearch-plugin" install "file://$relativePath" > /tmp/plugin-cli-output - local loglines=$(cat /tmp/plugin-cli-output | wc -l) + local loglines=$(cat 
/tmp/plugin-cli-output | grep -v "^[[:cntrl:]]" | wc -l) if [ "$GROUP" == "TAR PLUGINS" ]; then [ "$loglines" -gt "3" ] || { - echo "Expected more than 3 lines but the output was:" + echo "Expected more than 3 lines excluding progress bar but the output had $loglines lines and was:" cat /tmp/plugin-cli-output false } else [ "$loglines" -gt "2" ] || { - echo "Expected more than 2 lines but the output was:" + echo "Expected more than 2 lines excluding progress bar but the output had $loglines lines and was:" cat /tmp/plugin-cli-output false } diff --git a/qa/vagrant/versions b/qa/vagrant/versions new file mode 100644 index 00000000000..5f6e0edf99c --- /dev/null +++ b/qa/vagrant/versions @@ -0,0 +1,13 @@ +2.0.0 +2.0.1 +2.0.2 +2.1.0 +2.1.1 +2.1.2 +2.2.0 +2.2.1 +2.2.2 +2.3.0 +2.3.1 +2.3.2 +2.3.3 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json index d3660c4a679..218871a9765 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json @@ -3,7 +3,7 @@ "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/indices-rollover-index.html", "methods": ["POST"], "url": { - "path": "/{alias}/_rollover}", + "path": "/{alias}/_rollover", "paths": ["/{alias}/_rollover", "/{alias}/_rollover/{new_index}"], "parts": { "alias": { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/template.msearch.json b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json similarity index 97% rename from rest-api-spec/src/main/resources/rest-api-spec/api/template.msearch.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json index 379b7bdf362..39aa53b2572 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/template.msearch.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json @@ -1,5 +1,5 @@ { - "template.msearch": { + "msearch_template": { "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/current/search-template.html", "methods": ["GET", "POST"], "url": { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/task.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.get.json similarity index 97% rename from rest-api-spec/src/main/resources/rest-api-spec/api/task.get.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/tasks.get.json index 8024f015e96..f97206cd16f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/task.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.get.json @@ -1,5 +1,5 @@ { - "task.get": { + "tasks.get": { "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/tasks.html", "methods": ["GET"], "url": { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json index a1913fbfc17..a966cb0e507 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json @@ -4,7 +4,7 @@ "methods": ["GET"], "url": { "path": "/_tasks", - "paths": ["/_tasks", "/_tasks/{task_id}"], + "paths": ["/_tasks"], "parts": {}, "params": { "node_id": { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/template.search.json 
b/rest-api-spec/src/main/resources/rest-api-spec/api/template.search.json deleted file mode 100644 index 6a2a8c1d7e2..00000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/template.search.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "template.search": { - "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/current/search-template.html", - "methods": ["GET", "POST"], - "url": { - "path": "/_search/template", - "paths": ["/_search/template", "/{index}/_search/template", "/{index}/{type}/_search/template"], - "parts": { - "index": { - "type" : "list", - "description" : "A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" - }, - "type": { - "type" : "list", - "description" : "A comma-separated list of document types to search; leave empty to perform the operation on all types" - } - }, - "params" : { - "ignore_unavailable": { - "type" : "boolean", - "description" : "Whether specified concrete indices should be ignored when unavailable (missing or closed)" - }, - "allow_no_indices": { - "type" : "boolean", - "description" : "Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)" - }, - "expand_wildcards": { - "type" : "enum", - "options" : ["open","closed","none","all"], - "default" : "open", - "description" : "Whether to expand wildcard expression to concrete indices that are open, closed or both." - }, - "preference": { - "type" : "string", - "description" : "Specify the node or shard the operation should be performed on (default: random)" - }, - "routing": { - "type" : "list", - "description" : "A comma-separated list of specific routing values" - }, - "scroll": { - "type" : "duration", - "description" : "Specify how long a consistent view of the index should be maintained for scrolled search" - }, - "search_type": { - "type" : "enum", - "options" : ["query_then_fetch", "query_and_fetch", "dfs_query_then_fetch", "dfs_query_and_fetch"], - "description" : "Search operation type" - } - } - }, - "body": { - "description": "The search definition template and its params" - } - } -} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/README.asciidoc b/rest-api-spec/src/main/resources/rest-api-spec/test/README.asciidoc index 688d8cbdc5b..4e88cef4c9f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/README.asciidoc +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/README.asciidoc @@ -20,6 +20,7 @@ Test file structure A YAML test file consists of: * an optional `setup` section, followed by +* an optional `teardown` section, followed by * one or more test sections For instance: @@ -28,6 +29,10 @@ For instance: - do: .... - do: .... + --- + teardown: + - do: .... + --- "First test": - do: ... @@ -42,6 +47,11 @@ For instance: A `setup` section contains a list of commands to run before each test section in order to setup the same environment for each test section. +A `teardown` section contains a list of commands to run after each test +section, in order to restore the environment for the test sections that +follow. This may be needed for modifications made by the test that are not +cleared by the deletion of indices and templates. + A test section represents an independent test, containing multiple `do` statements and assertions.
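Putting the sections together: the following is a minimal, illustrative sketch of a complete test file with a `teardown` section (the index name, the transient cluster setting, and the section title are invented for this example; a transient setting is exactly the kind of state that deleting indices and templates would not clear):

    ---
    setup:
      - do:
          indices.create:
            index: test_index

    ---
    teardown:
      - do:
          cluster.put_settings:
            body:
              transient:
                cluster.routing.allocation.enable: null

    ---
    "Disable allocation temporarily":
      - do:
          cluster.put_settings:
            body:
              transient:
                cluster.routing.allocation.enable: "none"
      - match: { transient.cluster.routing.allocation.enable: "none" }

Because the `teardown` section runs after every test section, each section can assume the transient setting has been reset before it starts.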
The contents of a test section must be run in order, but individual test sections may be run in any order, as follows: @@ -49,9 +59,8 @@ order, but individual test sections may be run in any order, as follows: 1. run `setup` (if any) 2. reset the `response` var and the `stash` (see below) 2. run test contents -3. run teardown - -The `teardown` should delete all indices and all templates. +3. run `teardown` (if any) +4. delete all indices and all templates Dot notation: ------------- diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/20_headers.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/20_headers.yaml index b814856144b..16260151a31 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/20_headers.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/20_headers.yaml @@ -17,12 +17,8 @@ headers: Accept: application/yaml - - match: - $body: | - /^---\n - -\s+alias:\s+"test_alias"\s+ - index:\s+"test"\s+ - filter:\s+"-"\s+ - routing.index:\s+"-"\s+ - routing.search:\s+"-"\s+$/ - + - match: {0.alias: test_alias} + - match: {0.index: test} + - match: {0.filter: "-"} + - match: {0.routing\.index: "-"} + - match: {0.routing\.search: "-"} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/30_yaml.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/30_yaml.yaml index c892891f08f..178b77ce60d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/30_yaml.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/30_yaml.yaml @@ -1,8 +1,5 @@ --- -"Simple alias with yaml body through format argument": - - - skip: - features: yaml +"Simple alias with json body through format argument": - do: indices.create: @@ -15,15 +12,10 @@ - do: cat.aliases: - format: yaml - - - match: - $body: | - /^---\n - -\s+alias:\s+"test_alias"\s+ - index:\s+"test"\s+ - filter:\s+"-"\s+ - routing.index:\s+"-"\s+ - routing.search:\s+"-"\s+$/ - + format: json + - match: {0.alias: test_alias} + - match: {0.index: test} + - match: {0.filter: "-"} + - match: {0.routing\.index: "-"} + - match: {0.routing\.search: "-"} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml index 51f8fe9ed4c..1e485fc6d30 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml @@ -26,6 +26,7 @@ /^(green \s+ open \s+ index1 \s+ + ([a-zA-Z0-9=/_+]|[\\\-]){22} \s+ 1 \s+ 0 \s+ 0 \s+ @@ -62,6 +63,7 @@ /^( \s+ close \s+ index1 \s+ + ([a-zA-Z0-9=/_+]|[\\\-]){22} \s+ \s+ \s+ \s+ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yaml index 1bb031f0878..ed9a5b7c99f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yaml @@ -18,13 +18,9 @@ type: _all id: 1 - - match: - $body: | - /^---\n - _index:\s+\"test_1"\n - _type:\s+"test"\n - _id:\s+"1"\n - _version:\s+1\n - found:\s+true\n - _source:\n - \s+body:\s+"foo"\n$/ + - match: {_index: "test_1"} + - match: {_type: "test"} + - match: {_id: "1"} + - match: {_version: 1} + - match: {found: true} + - match: { _source: { body: foo }} diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/85_source_missing.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/85_source_missing.yaml new file mode 100644 index 00000000000..370f68d9504 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/85_source_missing.yaml @@ -0,0 +1,40 @@ +--- +setup: + - do: + indices.create: + index: test_1 + body: + mappings: + test: + _source: { enabled: false } + - do: + cluster.health: + wait_for_status: yellow + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + + +--- +"Missing document source with catch": + + - do: + catch: missing + get_source: + index: test_1 + type: test + id: 1 + +--- +"Missing document source with ignore": + + - do: + get_source: + index: test_1 + type: test + id: 1 + ignore: 404 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/10_basic.yaml index 647907614fa..3a4821193e6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/10_basic.yaml @@ -30,11 +30,11 @@ # perform alias rollover - do: - indices.rollover: - alias: "logs_search" - body: - conditions: - max_docs: 1 + indices.rollover: + alias: "logs_search" + body: + conditions: + max_docs: 1 - match: { old_index: logs-1 } - match: { new_index: logs-2 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/11_status.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/11_status.yaml index 525ea7ac421..ef4d53167b6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/11_status.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/11_status.yaml @@ -3,6 +3,9 @@ setup: - do: indices.create: index: test_1 + - do: + cluster.health: + wait_for_status: yellow --- "Check Status": diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yaml index caf44d0c788..424153aa573 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yaml @@ -1,6 +1,5 @@ --- -"Source filtering": - +setup: - do: index: index: test_1 @@ -10,40 +9,54 @@ - do: indices.refresh: {} +--- +"_source: true": - do: search: - # stringified for boolean value body: { _source: true, query: { match_all: {} } } - length: { hits.hits: 1 } - match: { hits.hits.0._source.count: 1 } +--- +"_source: false": - do: { search: { body: { _source: false, query: { match_all: {} } } } } - length: { hits.hits: 1 } - is_false: hits.hits.0._source +--- +"no filtering": - do: { search: { body: { query: { match_all: {} } } } } - length: { hits.hits: 1 } - match: { hits.hits.0._source.count: 1 } +--- +"_source in body": - do: { search: { body: { _source: include.field1, query: { match_all: {} } } } } - match: { hits.hits.0._source.include.field1: v1 } - is_false: hits.hits.0._source.include.field2 +--- +"_source include and _source in body": - do: { search: { _source_include: include.field1, body: { _source: include.field2, query: { match_all: {} } } } } - match: { hits.hits.0._source.include.field1: v1 } - is_false: hits.hits.0._source.include.field2 +--- +"_source_include": - do: { search: { _source_include: 
include.field1, body: { query: { match_all: {} } } } } - match: { hits.hits.0._source.include.field1: v1 } - is_false: hits.hits.0._source.include.field2 +--- +"_source_exclude": - do: { search: { _source_exclude: count, body: { query: { match_all: {} } } } } - match: { hits.hits.0._source.include: { field1 : v1 , field2: v2 }} - is_false: hits.hits.0._source.count - +--- +"_source field1 field2": - do: search: body: @@ -53,6 +66,8 @@ - match: { hits.hits.0._source.include.field2: v2 } - is_false: hits.hits.0._source.count +--- +"_source.include field1 field2": - do: search: body: @@ -63,6 +78,8 @@ - match: { hits.hits.0._source.include.field2: v2 } - is_false: hits.hits.0._source.count +--- +"_source includes and excludes": - do: search: body: @@ -73,7 +90,8 @@ - match: { hits.hits.0._source.include.field1: v1 } - is_false: hits.hits.0._source.include.field2 - +--- +"fields in body": - do: search: body: @@ -81,6 +99,8 @@ query: { match_all: {} } - is_false: hits.hits.0._source +--- +"fields in body with source": - do: search: body: @@ -89,13 +109,9 @@ - match: { hits.hits.0._source.include.field2: v2 } - is_true: hits.hits.0._source +--- +"fielddata_fields": - do: search: docvalue_fields: [ "count" ] - match: { hits.hits.0.fields.count: [1] } - - - do: - search: - fielddata_fields: [ "count" ] - - match: { hits.hits.0.fields.count: [1] } - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/task.get/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.get/10_basic.yaml similarity index 92% rename from rest-api-spec/src/main/resources/rest-api-spec/test/task.get/10_basic.yaml rename to rest-api-spec/src/main/resources/rest-api-spec/test/tasks.get/10_basic.yaml index ba90e1541fe..48d9f46ac73 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/task.get/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.get/10_basic.yaml @@ -6,5 +6,5 @@ - do: catch: missing - task.get: + tasks.get: task_id: foo:1 diff --git a/settings.gradle b/settings.gradle index c0d7a72b2a5..0c1e4b01e5c 100644 --- a/settings.gradle +++ b/settings.gradle @@ -31,11 +31,12 @@ List projects = [ 'plugins:analysis-phonetic', 'plugins:analysis-smartcn', 'plugins:analysis-stempel', - 'plugins:discovery-azure', + 'plugins:discovery-azure-classic', 'plugins:discovery-ec2', 'plugins:discovery-gce', 'plugins:ingest-geoip', 'plugins:ingest-attachment', + 'plugins:ingest-useragent', 'plugins:lang-javascript', 'plugins:lang-python', 'plugins:mapper-attachments', diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java index aa327ae2546..f08f15f236b 100644 --- a/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java @@ -53,12 +53,12 @@ abstract class ESElasticsearchCliTestCase extends ESTestCase { assertThat(status, equalTo(expectedStatus)); assertThat(init.get(), equalTo(expectedInit)); outputConsumer.accept(terminal.getOutput()); - } catch (Throwable t) { + } catch (Exception e) { // if an unexpected exception is thrown, we log // terminal output to aid debugging logger.info(terminal.getOutput()); // rethrow so the test fails - throw t; + throw e; } } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java 
b/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java index 128b0d0e315..45edbd8bcb2 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java @@ -32,7 +32,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.plugins.Plugin; @@ -67,7 +67,7 @@ public class MockInternalClusterInfoService extends InternalClusterInfoService { usage.getTotalBytes(), usage.getFreeBytes(), usage.getFreeBytes()); paths[0] = path; FsInfo fsInfo = new FsInfo(System.currentTimeMillis(), null, paths); - return new NodeStats(new DiscoveryNode(nodeName, DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), + return new NodeStats(new DiscoveryNode(nodeName, LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), System.currentTimeMillis(), null, null, null, null, null, fsInfo, diff --git a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java index 613445c2271..22a1e2660b6 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java +++ b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java @@ -246,7 +246,7 @@ public class MockBigArrays extends BigArrays { return arr; } - private static abstract class AbstractArrayWrapper { + private abstract static class AbstractArrayWrapper { final BigArray in; boolean clearOnResize; diff --git a/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java b/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java index fe434246035..8889dc5aac5 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java @@ -36,11 +36,13 @@ import java.io.IOException; import java.nio.file.Path; import java.util.Collections; +import static org.elasticsearch.test.ESTestCase.createAnalysisService; + public class MapperTestUtils { public static MapperService newMapperService(Path tempDir, Settings indexSettings) throws IOException { - IndicesModule indicesModule = new IndicesModule(new NamedWriteableRegistry()); + IndicesModule indicesModule = new IndicesModule(new NamedWriteableRegistry(), Collections.emptyList()); return newMapperService(tempDir, indexSettings, indicesModule); } @@ -54,7 +56,7 @@ public class MapperTestUtils { Settings finalSettings = settingsBuilder.build(); MapperRegistry mapperRegistry = indicesModule.getMapperRegistry(); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", finalSettings); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(finalSettings)).build(indexSettings); + AnalysisService analysisService = createAnalysisService(indexSettings, finalSettings); SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap()); return new MapperService(indexSettings, analysisService, 
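A recurring theme in the test-framework hunks above and below: instead of constructing an AnalysisRegistry by hand, tests now go through the createAnalysisService helpers that this patch adds to ESTestCase, passing AnalysisPlugin instances for any custom components. A minimal usage sketch under that assumption (the index name, setting key, and analyzer name are made up for illustration, and no custom plugins are registered here):

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.index.Index;
    import org.elasticsearch.index.analysis.AnalysisService;
    import org.elasticsearch.index.analysis.NamedAnalyzer;
    import org.elasticsearch.test.ESTestCase;

    public class AnalysisServiceUsageSketch extends ESTestCase {
        public void testCustomAnalyzerIsWired() throws Exception {
            // Index-level settings declaring a custom analyzer; the key and
            // the analyzer name are illustrative.
            Settings settings = Settings.builder()
                .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard")
                .build();
            // No AnalysisPlugin varargs passed, so only the default analysis
            // components are available alongside the settings above.
            AnalysisService analysisService = createAnalysisService(new Index("test", "_na_"), settings);
            NamedAnalyzer analyzer = analysisService.analyzer("my_analyzer");
            assertNotNull(analyzer);
        }
    }

This mirrors the MapperTestUtils change above: build the AnalysisService once through the shared helper so plugin-provided analysis components are wired consistently across tests.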
diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/IngestTestPlugin.java b/test/framework/src/main/java/org/elasticsearch/ingest/IngestTestPlugin.java index b32a2eab991..a9585aa8d41 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/IngestTestPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/IngestTestPlugin.java @@ -28,7 +28,7 @@ import org.elasticsearch.plugins.Plugin; public class IngestTestPlugin extends Plugin { public void onModule(NodeModule nodeModule) { - nodeModule.registerProcessor("test", (registry) -> config -> + nodeModule.registerProcessor("test", (registry) -> (tag, config) -> new TestProcessor("id", "test", doc -> { doc.setFieldValue("processed", true); if (doc.hasField("fail") && doc.getFieldValue("fail", Boolean.class)) { diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java b/test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java index e36d73a8d9f..b309d94fe08 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java @@ -64,9 +64,9 @@ public class TestProcessor implements Processor { return invokedCounter.get(); } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { @Override - public TestProcessor doCreate(String processorTag, Map config) throws Exception { + public TestProcessor create(String processorTag, Map config) throws Exception { return new TestProcessor(processorTag, "test-processor", ingestDocument -> {}); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index 5eeaa635305..0d9ed196838 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -71,7 +71,6 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.IndexFieldDataCache; @@ -86,12 +85,14 @@ import org.elasticsearch.index.query.support.QueryParsers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.IndicesModule; +import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.node.internal.InternalSettingsPreparer; +import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.script.Script.ScriptParseException; @@ -118,6 +119,7 @@ import java.util.Locale; import java.util.Map; import java.util.concurrent.ExecutionException; +import static java.util.Collections.emptyList; import static 
org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; @@ -324,11 +326,11 @@ public abstract class AbstractQueryTestCase> /** * Parses the query provided as string argument and compares it with the expected result provided as argument as a {@link QueryBuilder} */ - protected final static void assertParsedQuery(String queryAsString, QueryBuilder expectedQuery) throws IOException { + protected static final void assertParsedQuery(String queryAsString, QueryBuilder expectedQuery) throws IOException { assertParsedQuery(queryAsString, expectedQuery, ParseFieldMatcher.STRICT); } - protected final static void assertParsedQuery(String queryAsString, QueryBuilder expectedQuery, ParseFieldMatcher matcher) throws IOException { + protected static final void assertParsedQuery(String queryAsString, QueryBuilder expectedQuery, ParseFieldMatcher matcher) throws IOException { QueryBuilder newQuery = parseQuery(queryAsString, matcher); assertNotSame(newQuery, expectedQuery); assertEquals(expectedQuery, newQuery); @@ -338,31 +340,31 @@ public abstract class AbstractQueryTestCase> /** * Parses the query provided as bytes argument and compares it with the expected result provided as argument as a {@link QueryBuilder} */ - protected final static void assertParsedQuery(BytesReference queryAsBytes, QueryBuilder expectedQuery) throws IOException { + protected static final void assertParsedQuery(BytesReference queryAsBytes, QueryBuilder expectedQuery) throws IOException { assertParsedQuery(queryAsBytes, expectedQuery, ParseFieldMatcher.STRICT); } - protected final static void assertParsedQuery(BytesReference queryAsBytes, QueryBuilder expectedQuery, ParseFieldMatcher matcher) throws IOException { + protected static final void assertParsedQuery(BytesReference queryAsBytes, QueryBuilder expectedQuery, ParseFieldMatcher matcher) throws IOException { QueryBuilder newQuery = parseQuery(queryAsBytes, matcher); assertNotSame(newQuery, expectedQuery); assertEquals(expectedQuery, newQuery); assertEquals(expectedQuery.hashCode(), newQuery.hashCode()); } - protected final static QueryBuilder parseQuery(String queryAsString) throws IOException { + protected static final QueryBuilder parseQuery(String queryAsString) throws IOException { return parseQuery(queryAsString, ParseFieldMatcher.STRICT); } - protected final static QueryBuilder parseQuery(String queryAsString, ParseFieldMatcher matcher) throws IOException { + protected static final QueryBuilder parseQuery(String queryAsString, ParseFieldMatcher matcher) throws IOException { XContentParser parser = XContentFactory.xContent(queryAsString).createParser(queryAsString); return parseQuery(parser, matcher); } - protected final static QueryBuilder parseQuery(BytesReference queryAsBytes) throws IOException { + protected static final QueryBuilder parseQuery(BytesReference queryAsBytes) throws IOException { return parseQuery(queryAsBytes, ParseFieldMatcher.STRICT); } - protected final static QueryBuilder parseQuery(BytesReference queryAsBytes, ParseFieldMatcher matcher) throws IOException { + protected static final QueryBuilder parseQuery(BytesReference queryAsBytes, ParseFieldMatcher matcher) throws IOException { XContentParser parser = XContentFactory.xContent(queryAsBytes).createParser(queryAsBytes); return parseQuery(parser, matcher); } @@ -511,7 +513,7 @@ public abstract class AbstractQueryTestCase> protected QueryBuilder assertSerialization(QueryBuilder testQuery) throws IOException { try 
(BytesStreamOutput output = new BytesStreamOutput()) { output.writeNamedWriteable(testQuery); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), serviceHolder.namedWriteableRegistry)) { + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), serviceHolder.namedWriteableRegistry)) { QueryBuilder deserializedQuery = in.readNamedWriteable(QueryBuilder.class); assertEquals(testQuery, deserializedQuery); assertEquals(testQuery.hashCode(), deserializedQuery.hashCode()); @@ -560,7 +562,7 @@ public abstract class AbstractQueryTestCase> protected QB copyQuery(QB query) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { output.writeNamedWriteable(query); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), serviceHolder.namedWriteableRegistry)) { + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), serviceHolder.namedWriteableRegistry)) { return (QB) in.readNamedWriteable(QueryBuilder.class); } } @@ -688,7 +690,7 @@ public abstract class AbstractQueryTestCase> } public static class GeohashGenerator extends CodepointSetGenerator { - private final static char[] ASCII_SET = "0123456789bcdefghjkmnpqrstuvwxyz".toCharArray(); + private static final char[] ASCII_SET = "0123456789bcdefghjkmnpqrstuvwxyz".toCharArray(); public GeohashGenerator() { super(ASCII_SET); @@ -878,7 +880,7 @@ public abstract class AbstractQueryTestCase> scriptSettings.addAll(pluginsService.getPluginSettings()); scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED); SettingsModule settingsModule = new SettingsModule(settings, scriptSettings, pluginsService.getPluginSettingsFilter()); - searchModule = new SearchModule(settings, namedWriteableRegistry) { + searchModule = new SearchModule(settings, namedWriteableRegistry, false) { @Override protected void configureSearch() { // Skip me @@ -894,7 +896,7 @@ public abstract class AbstractQueryTestCase> b.bind(Environment.class).toInstance(new Environment(settings)); b.bind(ThreadPool.class).toInstance(threadPool); }, - settingsModule, new IndicesModule(namedWriteableRegistry) { + settingsModule, new IndicesModule(namedWriteableRegistry, pluginsService.filterPlugins(MapperPlugin.class)) { @Override public void configure() { // skip services @@ -917,7 +919,8 @@ public abstract class AbstractQueryTestCase> injector = modulesBuilder.createInjector(); IndexScopedSettings indexScopedSettings = injector.getInstance(IndexScopedSettings.class); idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings, indexScopedSettings); - AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); + AnalysisModule analysisModule = new AnalysisModule(new Environment(settings), emptyList()); + AnalysisService analysisService = analysisModule.getAnalysisRegistry().build(idxSettings); scriptService = scriptModule.getScriptService(); similarityService = new SimilarityService(idxSettings, Collections.emptyMap()); MapperRegistry mapperRegistry = injector.getInstance(MapperRegistry.class); diff --git a/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java index 933f26e6e81..4440fbe117d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java +++ b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java @@ -49,7 +49,7 @@ public class 
BackgroundIndexer implements AutoCloseable { final Thread[] writers; final CountDownLatch stopLatch; - final CopyOnWriteArrayList<Throwable> failures; + final CopyOnWriteArrayList<Exception> failures; final AtomicBoolean stop = new AtomicBoolean(false); final AtomicLong idGenerator = new AtomicLong(); final AtomicLong indexCounter = new AtomicLong(); @@ -169,7 +169,7 @@ public class BackgroundIndexer implements AutoCloseable { } } logger.info("**** done indexing thread {} stop: {} numDocsIndexed: {}", indexerId, stop.get(), indexCounter.get()); - } catch (Throwable e) { + } catch (Exception e) { failures.add(e); logger.warn("**** failed indexing thread {} on doc id {}", e, indexerId, id); } finally { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java b/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java index fe7ba74a327..a6d35930e6b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java @@ -21,7 +21,6 @@ package org.elasticsearch.test; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.NodeConnectionsService; @@ -30,7 +29,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.threadpool.ThreadPool; import java.util.Arrays; @@ -46,7 +45,7 @@ public class ClusterServiceUtils { ClusterService clusterService = new ClusterService(Settings.builder().put("cluster.name", "ClusterServiceTests").build(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), threadPool); - clusterService.setLocalNode(new DiscoveryNode("node", DummyTransportAddress.INSTANCE, Collections.emptyMap(), + clusterService.setLocalNode(new DiscoveryNode("node", LocalTransportAddress.buildUnique(), Collections.emptyMap(), new HashSet<>(Arrays.asList(DiscoveryNode.Role.values())),Version.CURRENT)); clusterService.setNodeConnectionsService(new NodeConnectionsService(Settings.EMPTY, null, null) { @Override @@ -98,8 +97,8 @@ public class ClusterServiceUtils { } @Override - public void onFailure(String source, Throwable t) { - fail("unexpected exception" + t); + public void onFailure(String source, Exception e) { + fail("unexpected exception" + e); } }); try { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java index 7cbb7e819c4..1aa0428454e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java @@ -38,10 +38,11 @@ import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import
org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.gateway.AsyncShardFetch; import org.elasticsearch.gateway.GatewayAllocator; import org.elasticsearch.gateway.ReplicaShardAllocator; @@ -132,19 +133,19 @@ public abstract class ESAllocationTestCase extends ESTestCase { } protected static DiscoveryNode newNode(String nodeName, String nodeId, Map attributes) { - return new DiscoveryNode(nodeName, nodeId, DummyTransportAddress.INSTANCE, attributes, MASTER_DATA_ROLES, Version.CURRENT); + return new DiscoveryNode(nodeName, nodeId, LocalTransportAddress.buildUnique(), attributes, MASTER_DATA_ROLES, Version.CURRENT); } protected static DiscoveryNode newNode(String nodeId, Map attributes) { - return new DiscoveryNode(nodeId, DummyTransportAddress.INSTANCE, attributes, MASTER_DATA_ROLES, Version.CURRENT); + return new DiscoveryNode(nodeId, LocalTransportAddress.buildUnique(), attributes, MASTER_DATA_ROLES, Version.CURRENT); } protected static DiscoveryNode newNode(String nodeId, Set roles) { - return new DiscoveryNode(nodeId, DummyTransportAddress.INSTANCE, emptyMap(), roles, Version.CURRENT); + return new DiscoveryNode(nodeId, LocalTransportAddress.buildUnique(), emptyMap(), roles, Version.CURRENT); } protected static DiscoveryNode newNode(String nodeId, Version version) { - return new DiscoveryNode(nodeId, DummyTransportAddress.INSTANCE, emptyMap(), MASTER_DATA_ROLES, version); + return new DiscoveryNode(nodeId, LocalTransportAddress.buildUnique(), emptyMap(), MASTER_DATA_ROLES, version); } protected static ClusterState startRandomInitializingShard(ClusterState clusterState, AllocationService strategy) { @@ -157,7 +158,8 @@ public abstract class ESAllocationTestCase extends ESTestCase { } protected static AllocationDeciders yesAllocationDeciders() { - return new AllocationDeciders(Settings.EMPTY, new AllocationDecider[] {new TestAllocateDecision(Decision.YES)}); + return new AllocationDeciders(Settings.EMPTY, new AllocationDecider[] {new TestAllocateDecision(Decision.YES), + new SameShardAllocationDecider(Settings.EMPTY)}); } protected static AllocationDeciders noAllocationDeciders() { @@ -165,7 +167,8 @@ public abstract class ESAllocationTestCase extends ESTestCase { } protected static AllocationDeciders throttleAllocationDeciders() { - return new AllocationDeciders(Settings.EMPTY, new AllocationDecider[] {new TestAllocateDecision(Decision.THROTTLE)}); + return new AllocationDeciders(Settings.EMPTY, new AllocationDecider[] {new TestAllocateDecision(Decision.THROTTLE), + new SameShardAllocationDecider(Settings.EMPTY)}); } public static class TestAllocateDecision extends AllocationDecider { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index a49c33f5be8..6d07a85e50b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -106,7 +106,6 @@ import org.elasticsearch.index.MergeSchedulerConfig; import org.elasticsearch.index.MockEngineFactoryPlugin; import org.elasticsearch.index.codec.CodecService; import 
org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.IndicesQueryCache; import org.elasticsearch.indices.IndicesRequestCache; @@ -405,7 +404,7 @@ public abstract class ESIntegTestCase extends ESTestCase { .setOrder(0) .setSettings(randomSettingsBuilder); if (mappings != null) { - logger.info("test using _default_ mappings: [{}]", mappings.bytes().toUtf8()); + logger.info("test using _default_ mappings: [{}]", mappings.bytes().utf8ToString()); putTemplate.addMapping("_default_", mappings); } assertAcked(putTemplate.execute().actionGet()); @@ -925,7 +924,7 @@ public abstract class ESIntegTestCase extends ESTestCase { * This saves on unneeded searches. * @return the actual number of docs seen. */ - public long waitForDocs(final long numDocs, final @Nullable BackgroundIndexer indexer) throws InterruptedException { + public long waitForDocs(final long numDocs, @Nullable final BackgroundIndexer indexer) throws InterruptedException { // indexing threads can wait for up to ~1m before retrying when they first try to index into a shard which is not STARTED. return waitForDocs(numDocs, 90, TimeUnit.SECONDS, indexer); } @@ -940,7 +939,7 @@ public abstract class ESIntegTestCase extends ESTestCase { * This saves on unneeded searches. * @return the actual number of docs seen. */ - public long waitForDocs(final long numDocs, int maxWaitTime, TimeUnit maxWaitTimeUnit, final @Nullable BackgroundIndexer indexer) + public long waitForDocs(final long numDocs, int maxWaitTime, TimeUnit maxWaitTimeUnit, @Nullable final BackgroundIndexer indexer) throws InterruptedException { final AtomicLong lastKnownCount = new AtomicLong(-1); long lastStartCount = -1; @@ -956,7 +955,7 @@ public abstract class ESIntegTestCase extends ESTestCase { client().admin().indices().prepareRefresh().get(); } lastKnownCount.set(count); - } catch (Throwable e) { // count now acts like search and barfs if all shards failed... + } catch (Exception e) { // count now acts like search and barfs if all shards failed... logger.debug("failed to executed count", e); return false; } @@ -1334,7 +1333,7 @@ public abstract class ESIntegTestCase extends ESTestCase { } final String[] indices = indicesSet.toArray(new String[indicesSet.size()]); Collections.shuffle(builders, random()); - final CopyOnWriteArrayList<Tuple<IndexRequestBuilder, Throwable>> errors = new CopyOnWriteArrayList<>(); + final CopyOnWriteArrayList<Tuple<IndexRequestBuilder, Exception>> errors = new CopyOnWriteArrayList<>(); List<CountDownLatch> inFlightAsyncOperations = new ArrayList<>(); // If you are indexing just a few documents then frequently do it one at a time. If many then frequently in bulk. if (builders.size() < FREQUENT_BULK_THRESHOLD ? frequently() : builders.size() < ALWAYS_BULK_THRESHOLD ?
rarely() : false) { @@ -1367,8 +1366,8 @@ public abstract class ESIntegTestCase extends ESTestCase { for (CountDownLatch operation : inFlightAsyncOperations) { operation.await(); } - final List<Throwable> actualErrors = new ArrayList<>(); - for (Tuple<IndexRequestBuilder, Throwable> tuple : errors) { + final List<Exception> actualErrors = new ArrayList<>(); + for (Tuple<IndexRequestBuilder, Exception> tuple : errors) { if (ExceptionsHelper.unwrapCause(tuple.v2()) instanceof EsRejectedExecutionException) { tuple.v1().execute().actionGet(); // re-index if rejected } else { @@ -1526,7 +1525,7 @@ public abstract class ESIntegTestCase extends ESTestCase { } @Override - public final void onFailure(Throwable t) { + public final void onFailure(Exception t) { try { logger.info("Action Failed", t); addError(t); @@ -1535,24 +1534,24 @@ public abstract class ESIntegTestCase extends ESTestCase { } } - protected void addError(Throwable t) { + protected void addError(Exception e) { } } private class PayloadLatchedActionListener<Response, T extends ActionRequestBuilder<?, Response, ?>> extends LatchedActionListener<Response> { - private final CopyOnWriteArrayList<Tuple<T, Throwable>> errors; + private final CopyOnWriteArrayList<Tuple<T, Exception>> errors; private final T builder; - public PayloadLatchedActionListener(T builder, CountDownLatch latch, CopyOnWriteArrayList<Tuple<T, Throwable>> errors) { + public PayloadLatchedActionListener(T builder, CountDownLatch latch, CopyOnWriteArrayList<Tuple<T, Exception>> errors) { super(latch); this.errors = errors; this.builder = builder; } @Override - protected void addError(Throwable t) { - errors.add(new Tuple<>(builder, t)); + protected void addError(Exception e) { - errors.add(new Tuple<>(builder, e)); } } @@ -2035,7 +2034,7 @@ public abstract class ESIntegTestCase extends ESTestCase { * The returned client gets automatically closed when needed, it shouldn't be closed as part of tests otherwise * it cannot be reused by other tests anymore. */ - protected synchronized static RestClient getRestClient() { + protected static synchronized RestClient getRestClient() { if (restClient == null) { restClient = createRestClient(null); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 76616fbdb44..6d63b6a5428 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -28,6 +28,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomInts; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter; + import org.apache.lucene.uninverting.UninvertingReader; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; @@ -41,10 +42,10 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.io.PathUtilsForTesting; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -54,9 +55,15 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import
org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisService; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MetadataFieldMapper; +import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.plugins.MapperPlugin; +import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; @@ -85,6 +92,7 @@ import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Random; import java.util.Set; import java.util.TreeMap; import java.util.concurrent.ExecutorService; @@ -337,9 +345,15 @@ public abstract class ESTestCase extends LuceneTestCase { /** Pick a random object from the given array. The array must not be empty. */ public static <T> T randomFrom(T... array) { - return RandomPicks.randomFrom(random(), array); + return randomFrom(random(), array); } + /** Pick a random object from the given array. The array must not be empty. */ + public static <T> T randomFrom(Random random, T... array) { + return RandomPicks.randomFrom(random, array); + } + + /** Pick a random object from the given list. */ public static <T> T randomFrom(List<T> list) { return RandomPicks.randomFrom(random(), list); @@ -406,7 +420,7 @@ public abstract class ESTestCase extends LuceneTestCase { return generateRandomStringArray(maxArraySize, maxStringSize, allowNull, true); } - private static String[] TIME_SUFFIXES = new String[]{"d", "H", "ms", "s", "S", "w"}; + private static String[] TIME_SUFFIXES = new String[]{"d", "h", "ms", "s", "m"}; private static String randomTimeValue(int lower, int upper) { return randomIntBetween(lower, upper) + randomFrom(TIME_SUFFIXES); @@ -768,29 +782,34 @@ public abstract class ESTestCase extends LuceneTestCase { } /** - * Creates an AnalysisService to test analysis factories and analyzers. + * Creates an AnalysisService with all the default analyzers configured. */ - @SafeVarargs - public static AnalysisService createAnalysisService(Index index, Settings settings, Consumer<AnalysisModule>... moduleConsumers) throws IOException { + public static AnalysisService createAnalysisService(Index index, Settings settings, AnalysisPlugin... analysisPlugins) + throws IOException { Settings nodeSettings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); - return createAnalysisService(index, nodeSettings, settings, moduleConsumers); + return createAnalysisService(index, nodeSettings, settings, analysisPlugins); } /** - * Creates an AnalysisService to test analysis factories and analyzers. + * Creates an AnalysisService with all the default analyzers configured. */ - @SafeVarargs - public static AnalysisService createAnalysisService(Index index, Settings nodeSettings, Settings settings, Consumer<AnalysisModule>... moduleConsumers) throws IOException { + public static AnalysisService createAnalysisService(Index index, Settings nodeSettings, Settings settings, + AnalysisPlugin...
analysisPlugins) throws IOException { Settings indexSettings = Settings.builder().put(settings) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .build(); + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build(); + return createAnalysisService(IndexSettingsModule.newIndexSettings(index, indexSettings), nodeSettings, analysisPlugins); + } + + /** + * Creates an AnalysisService with all the default analyzers configured. + */ + public static AnalysisService createAnalysisService(IndexSettings indexSettings, Settings nodeSettings, + AnalysisPlugin... analysisPlugins) throws IOException { Environment env = new Environment(nodeSettings); - AnalysisModule analysisModule = new AnalysisModule(env); - for (Consumer<AnalysisModule> consumer : moduleConsumers) { - consumer.accept(analysisModule); - } - SettingsModule settingsModule = new SettingsModule(nodeSettings, InternalSettingsPlugin.VERSION_CREATED); - final AnalysisService analysisService = analysisModule.buildRegistry().build(IndexSettingsModule.newIndexSettings(index, indexSettings)); + AnalysisModule analysisModule = new AnalysisModule(env, Arrays.asList(analysisPlugins)); + final AnalysisService analysisService = analysisModule.getAnalysisRegistry() + .build(indexSettings); return analysisService; } @@ -803,4 +822,21 @@ public abstract class ESTestCase extends LuceneTestCase { Environment environment = new Environment(settings); return new ScriptModule(settings, environment, null, singletonList(new MockScriptEngine()), emptyList()); } + + /** Creates an IndicesModule for testing with the given mappers and metadata mappers. */ + public static IndicesModule newTestIndicesModule(Map<String, Mapper.TypeParser> extraMappers, + Map<String, MetadataFieldMapper.TypeParser> extraMetadataMappers) { + return new IndicesModule(new NamedWriteableRegistry(), Collections.singletonList( + new MapperPlugin() { + @Override + public Map<String, Mapper.TypeParser> getMappers() { + return extraMappers; + } + @Override + public Map<String, MetadataFieldMapper.TypeParser> getMetadataMappers() { + return extraMetadataMappers; + } + } + )); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 2873b4b5954..5dc3ce7abf3 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -28,6 +28,7 @@ import org.apache.lucene.store.StoreRateLimiting; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag; @@ -38,7 +39,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodeService; +import org.elasticsearch.cluster.node.DiscoveryNode.Role; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.OperationRouting; import org.elasticsearch.cluster.routing.ShardRouting; @@ -87,6 +88,7 @@ import org.elasticsearch.search.SearchService; import org.elasticsearch.test.disruption.ServiceDisruptionScheme; import
org.elasticsearch.test.transport.AssertingLocalTransport; import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportSettings; @@ -367,15 +369,6 @@ public final class InternalTestCluster extends TestCluster { private Settings getSettings(int nodeOrdinal, long nodeSeed, Settings others) { Builder builder = Settings.builder().put(defaultSettings) .put(getRandomNodeSettings(nodeSeed)); - Settings interimSettings = builder.build(); - final String dataSuffix = getRoleSuffix(interimSettings); - if (dataSuffix.isEmpty() == false) { - // to make sure that a master node will not pick up on the data folder of a data only node - // once restarted we append the role suffix to each path. - String[] dataPath = Environment.PATH_DATA_SETTING.get(interimSettings).stream() - .map(path -> path + dataSuffix).toArray(String[]::new); - builder.putArray(Environment.PATH_DATA_SETTING.getKey(), dataPath); - } Settings settings = nodeConfigurationSource.nodeSettings(nodeOrdinal); if (settings != null) { if (settings.get(ClusterName.CLUSTER_NAME_SETTING.getKey()) != null) { @@ -410,12 +403,12 @@ public final class InternalTestCluster extends TestCluster { builder.put("cache.recycler.page.type", RandomPicks.randomFrom(random, PageCacheRecycler.Type.values())); } if (random.nextInt(10) == 0) { // 10% of the nodes have a very frequent check interval - builder.put(SearchService.KEEPALIVE_INTERVAL_SETTING.getKey(), TimeValue.timeValueMillis(10 + random.nextInt(2000))); + builder.put(SearchService.KEEPALIVE_INTERVAL_SETTING.getKey(), TimeValue.timeValueMillis(10 + random.nextInt(2000)).getStringRep()); } else if (random.nextInt(10) != 0) { // 90% of the time - 10% of the time we don't set anything - builder.put(SearchService.KEEPALIVE_INTERVAL_SETTING.getKey(), TimeValue.timeValueSeconds(10 + random.nextInt(5 * 60))); + builder.put(SearchService.KEEPALIVE_INTERVAL_SETTING.getKey(), TimeValue.timeValueSeconds(10 + random.nextInt(5 * 60)).getStringRep()); } if (random.nextBoolean()) { // sometimes set a - builder.put(SearchService.DEFAULT_KEEPALIVE_SETTING.getKey(), TimeValue.timeValueSeconds(100 + random.nextInt(5 * 60))); + builder.put(SearchService.DEFAULT_KEEPALIVE_SETTING.getKey(), TimeValue.timeValueSeconds(100 + random.nextInt(5 * 60)).getStringRep()); } builder.put(EsExecutors.PROCESSORS_SETTING.getKey(), 1 + random.nextInt(3)); @@ -428,9 +421,9 @@ public final class InternalTestCluster extends TestCluster { // randomize netty settings if (random.nextBoolean()) { builder.put(NettyTransport.WORKER_COUNT.getKey(), random.nextInt(3) + 1); - builder.put(NettyTransport.CONNECTIONS_PER_NODE_RECOVERY.getKey(), random.nextInt(2) + 1); - builder.put(NettyTransport.CONNECTIONS_PER_NODE_BULK.getKey(), random.nextInt(3) + 1); - builder.put(NettyTransport.CONNECTIONS_PER_NODE_REG.getKey(), random.nextInt(6) + 1); + builder.put(TcpTransport.CONNECTIONS_PER_NODE_RECOVERY.getKey(), random.nextInt(2) + 1); + builder.put(TcpTransport.CONNECTIONS_PER_NODE_BULK.getKey(), random.nextInt(3) + 1); + builder.put(TcpTransport.CONNECTIONS_PER_NODE_REG.getKey(), random.nextInt(6) + 1); } if (random.nextBoolean()) { @@ -462,14 +455,14 @@ public final class InternalTestCluster extends TestCluster { } if (random.nextBoolean()) { - builder.put(NettyTransport.PING_SCHEDULE.getKey(), RandomInts.randomIntBetween(random, 100, 2000) + "ms"); + 
builder.put(TcpTransport.PING_SCHEDULE.getKey(), RandomInts.randomIntBetween(random, 100, 2000) + "ms"); } if (random.nextBoolean()) { builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getKey(), RandomInts.randomIntBetween(random, 0, 2000)); } if (random.nextBoolean()) { - builder.put(ScriptService.SCRIPT_CACHE_EXPIRE_SETTING.getKey(), TimeValue.timeValueMillis(RandomInts.randomIntBetween(random, 750, 10000000))); + builder.put(ScriptService.SCRIPT_CACHE_EXPIRE_SETTING.getKey(), TimeValue.timeValueMillis(RandomInts.randomIntBetween(random, 750, 10000000)).getStringRep()); } return builder.build(); @@ -498,7 +491,7 @@ public final class InternalTestCluster extends TestCluster { return randomNodeAndClient; } NodeAndClient buildNode = buildNode(); - buildNode.node().start(); + buildNode.startNode(); publishNode(buildNode); return buildNode; } @@ -569,7 +562,7 @@ public final class InternalTestCluster extends TestCluster { n == 0 ? nodes.values().stream() : nodes.values().stream().filter(new DataNodePredicate().and(new MasterNodePredicate(getMasterName()).negate())); final Iterator values = collection.iterator(); - logger.info("changing cluster size from {} to {}, {} data nodes", size(), n + numSharedCoordOnlyNodes, n); + logger.info("changing cluster size from {} data nodes to {}", size, n); Set nodesToRemove = new HashSet<>(); int numNodesAndClients = 0; while (values.hasNext() && numNodesAndClients++ < size - n) { @@ -586,17 +579,17 @@ public final class InternalTestCluster extends TestCluster { } } - private NodeAndClient buildNode(Settings settings, Version version) { + private NodeAndClient buildNode(Settings settings) { int ord = nextNodeId.getAndIncrement(); - return buildNode(ord, random.nextLong(), settings, version, false); + return buildNode(ord, random.nextLong(), settings, false); } private NodeAndClient buildNode() { int ord = nextNodeId.getAndIncrement(); - return buildNode(ord, random.nextLong(), null, Version.CURRENT, false); + return buildNode(ord, random.nextLong(), null, false); } - private NodeAndClient buildNode(int nodeId, long seed, Settings settings, Version version, boolean reuseExisting) { + private NodeAndClient buildNode(int nodeId, long seed, Settings settings, boolean reuseExisting) { assert Thread.holdsLock(this); ensureOpen(); settings = getSettings(nodeId, seed, settings); @@ -612,10 +605,10 @@ public final class InternalTestCluster extends TestCluster { .put(Environment.PATH_HOME_SETTING.getKey(), baseDir) // allow overriding path.home .put(settings) .put("node.name", name) - .put(DiscoveryNodeService.NODE_ID_SEED_SETTING.getKey(), seed) + .put(NodeEnvironment.NODE_ID_SEED_SETTING.getKey(), seed) .build(); MockNode node = new MockNode(finalSettings, plugins); - return new NodeAndClient(name, node); + return new NodeAndClient(name, node, nodeId); } private String buildNodeName(int id, Settings settings) { @@ -630,10 +623,10 @@ public final class InternalTestCluster extends TestCluster { private String getRoleSuffix(Settings settings) { String suffix = ""; if (Node.NODE_MASTER_SETTING.exists(settings) && Node.NODE_MASTER_SETTING.get(settings)) { - suffix = suffix + DiscoveryNode.Role.MASTER.getAbbreviation(); + suffix = suffix + Role.MASTER.getAbbreviation(); } if (Node.NODE_DATA_SETTING.exists(settings) && Node.NODE_DATA_SETTING.get(settings)) { - suffix = suffix + DiscoveryNode.Role.DATA.getAbbreviation(); + suffix = suffix + Role.DATA.getAbbreviation(); } if (Node.NODE_MASTER_SETTING.exists(settings) && Node.NODE_MASTER_SETTING.get(settings) == false && 
Node.NODE_DATA_SETTING.exists(settings) && Node.NODE_DATA_SETTING.get(settings) == false @@ -709,7 +702,7 @@ public final class InternalTestCluster extends TestCluster { return getRandomNodeAndClient(new NoDataNoMasterNodePredicate()).client(random); } - public synchronized Client startCoordinatingOnlyNode(Settings settings) { + public synchronized String startCoordinatingOnlyNode(Settings settings) { ensureOpen(); // currently unused Builder builder = Settings.builder().put(settings).put(Node.NODE_MASTER_SETTING.getKey(), false) .put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_INGEST_SETTING.getKey(), false); @@ -717,8 +710,7 @@ public final class InternalTestCluster extends TestCluster { // if we are the first node - don't wait for a state builder.put(DiscoverySettings.INITIAL_STATE_TIMEOUT_SETTING.getKey(), 0); } - String name = startNode(builder); - return nodes.get(name).nodeClient(); + return startNode(builder); } /** @@ -771,7 +763,7 @@ public final class InternalTestCluster extends TestCluster { } @Override - public void close() { + public synchronized void close() { if (this.open.compareAndSet(true, false)) { if (activeDisruptionScheme != null) { activeDisruptionScheme.testClusterClosed(); @@ -793,10 +785,13 @@ public final class InternalTestCluster extends TestCluster { private Client transportClient; private final AtomicBoolean closed = new AtomicBoolean(false); private final String name; + private final int nodeAndClientId; - NodeAndClient(String name, MockNode node) { + NodeAndClient(String name, MockNode node, int nodeAndClientId) { this.node = node; this.name = name; + this.nodeAndClientId = nodeAndClientId; + markNodeDataDirsAsNotEligableForWipe(node); } Node node() { @@ -806,6 +801,10 @@ public final class InternalTestCluster extends TestCluster { return node; } + public int nodeAndClientId() { + return nodeAndClientId; + } + Client client(Random random) { if (closed.get()) { throw new RuntimeException("already closed"); @@ -860,12 +859,16 @@ public final class InternalTestCluster extends TestCluster { } } + void startNode() { + node.start(); + } + void closeNode() throws IOException { - registerDataPath(); + markNodeDataDirsAsPendingForWipe(node); node.close(); } - void restart(RestartCallback callback) throws Exception { + void restart(RestartCallback callback, boolean clearDataIfNeeded) throws Exception { assert callback != null; resetClient(); if (!node.isClosed()) { @@ -875,31 +878,32 @@ public final class InternalTestCluster extends TestCluster { if (newSettings == null) { newSettings = Settings.EMPTY; } + if (clearDataIfNeeded) { + clearDataIfNeeded(callback); + } + createNewNode(newSettings); + startNode(); + } + + private void clearDataIfNeeded(RestartCallback callback) throws IOException { if (callback.clearData(name)) { - NodeEnvironment nodeEnv = getInstanceFromNode(NodeEnvironment.class, node); + NodeEnvironment nodeEnv = node.getNodeEnvironment(); if (nodeEnv.hasNodeFile()) { - IOUtils.rm(nodeEnv.nodeDataPaths()); + final Path[] locations = nodeEnv.nodeDataPaths(); + logger.debug("removing node data paths: [{}]", Arrays.toString(locations)); + IOUtils.rm(locations); } } - startNewNode(newSettings); } - private void startNewNode(final Settings newSettings) { - final long newIdSeed = DiscoveryNodeService.NODE_ID_SEED_SETTING.get(node.settings()) + 1; // use a new seed to make sure we have new node id - Settings finalSettings = Settings.builder().put(node.settings()).put(newSettings).put(DiscoveryNodeService.NODE_ID_SEED_SETTING.getKey(), 
newIdSeed).build(); + private void createNewNode(final Settings newSettings) { + final long newIdSeed = NodeEnvironment.NODE_ID_SEED_SETTING.get(node.settings()) + 1; // use a new seed to make sure we have new node id + Settings finalSettings = Settings.builder().put(node.settings()).put(newSettings).put(NodeEnvironment.NODE_ID_SEED_SETTING.getKey(), newIdSeed).build(); Collection> plugins = node.getPlugins(); node = new MockNode(finalSettings, plugins); - node.start(); + markNodeDataDirsAsNotEligableForWipe(node); } - void registerDataPath() { - NodeEnvironment nodeEnv = getInstanceFromNode(NodeEnvironment.class, node); - if (nodeEnv.hasNodeFile()) { - dataDirToClean.addAll(Arrays.asList(nodeEnv.nodeDataPaths())); - } - } - - @Override public void close() throws IOException { try { @@ -972,25 +976,42 @@ public final class InternalTestCluster extends TestCluster { } } randomlyResetClients(); - if (wipeData) { - wipeDataDirectories(); - } - if (nextNodeId.get() == sharedNodesSeeds.length && nodes.size() == sharedNodesSeeds.length) { - logger.debug("Cluster hasn't changed - moving out - nodes: [{}] nextNodeId: [{}] numSharedNodes: [{}]", nodes.keySet(), nextNodeId.get(), sharedNodesSeeds.length); + final int newSize = sharedNodesSeeds.length; + if (nextNodeId.get() == newSize && nodes.size() == newSize) { + if (wipeData) { + wipePendingDataDirectories(); + } + logger.debug("Cluster hasn't changed - moving out - nodes: [{}] nextNodeId: [{}] numSharedNodes: [{}]", nodes.keySet(), nextNodeId.get(), newSize); return; } - logger.debug("Cluster is NOT consistent - restarting shared nodes - nodes: [{}] nextNodeId: [{}] numSharedNodes: [{}]", nodes.keySet(), nextNodeId.get(), sharedNodesSeeds.length); + logger.debug("Cluster is NOT consistent - restarting shared nodes - nodes: [{}] nextNodeId: [{}] numSharedNodes: [{}]", nodes.keySet(), nextNodeId.get(), newSize); + + // trash all nodes with id >= sharedNodesSeeds.length - they are non shared - Set sharedNodes = new HashSet<>(); - assert sharedNodesSeeds.length == numSharedDedicatedMasterNodes + numSharedDataNodes + numSharedCoordOnlyNodes; + for (Iterator iterator = nodes.values().iterator(); iterator.hasNext();) { + NodeAndClient nodeAndClient = iterator.next(); + if (nodeAndClient.nodeAndClientId() >= sharedNodesSeeds.length) { + logger.debug("Close Node [{}] not shared", nodeAndClient.name); + nodeAndClient.close(); + iterator.remove(); + } + } + + // clean up what the nodes left that is unused + if (wipeData) { + wipePendingDataDirectories(); + } + + // start any missing node + assert newSize == numSharedDedicatedMasterNodes + numSharedDataNodes + numSharedCoordOnlyNodes; for (int i = 0; i < numSharedDedicatedMasterNodes; i++) { final Settings.Builder settings = Settings.builder(); settings.put(Node.NODE_MASTER_SETTING.getKey(), true).build(); settings.put(Node.NODE_DATA_SETTING.getKey(), false).build(); - NodeAndClient nodeAndClient = buildNode(i, sharedNodesSeeds[i], settings.build(), Version.CURRENT, true); - nodeAndClient.node().start(); - sharedNodes.add(nodeAndClient); + NodeAndClient nodeAndClient = buildNode(i, sharedNodesSeeds[i], settings.build(), true); + nodeAndClient.startNode(); + publishNode(nodeAndClient); } for (int i = numSharedDedicatedMasterNodes; i < numSharedDedicatedMasterNodes + numSharedDataNodes; i++) { final Settings.Builder settings = Settings.builder(); @@ -999,44 +1020,35 @@ public final class InternalTestCluster extends TestCluster { settings.put(Node.NODE_MASTER_SETTING.getKey(), false).build(); 
settings.put(Node.NODE_DATA_SETTING.getKey(), true).build(); } - NodeAndClient nodeAndClient = buildNode(i, sharedNodesSeeds[i], settings.build(), Version.CURRENT, true); - nodeAndClient.node().start(); - sharedNodes.add(nodeAndClient); + NodeAndClient nodeAndClient = buildNode(i, sharedNodesSeeds[i], settings.build(), true); + nodeAndClient.startNode(); + publishNode(nodeAndClient); } for (int i = numSharedDedicatedMasterNodes + numSharedDataNodes; i < numSharedDedicatedMasterNodes + numSharedDataNodes + numSharedCoordOnlyNodes; i++) { final Builder settings = Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false) .put(Node.NODE_DATA_SETTING.getKey(), false).put(Node.NODE_INGEST_SETTING.getKey(), false); - NodeAndClient nodeAndClient = buildNode(i, sharedNodesSeeds[i], settings.build(), Version.CURRENT, true); - nodeAndClient.node().start(); - sharedNodes.add(nodeAndClient); - } - - for (NodeAndClient nodeAndClient : sharedNodes) { - nodes.remove(nodeAndClient.name); - } - - // trash the remaining nodes - final Collection toShutDown = nodes.values(); - for (NodeAndClient nodeAndClient : toShutDown) { - logger.debug("Close Node [{}] not shared", nodeAndClient.name); - nodeAndClient.close(); - } - nodes.clear(); - for (NodeAndClient nodeAndClient : sharedNodes) { + NodeAndClient nodeAndClient = buildNode(i, sharedNodesSeeds[i], settings.build(), true); + nodeAndClient.startNode(); publishNode(nodeAndClient); } - nextNodeId.set(sharedNodesSeeds.length); - assert size() == sharedNodesSeeds.length; - if (size() > 0) { - client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(sharedNodesSeeds.length)).get(); + + nextNodeId.set(newSize); + assert size() == newSize; + if (newSize > 0) { + ClusterHealthResponse response = client().admin().cluster().prepareHealth() + .setWaitForNodes(Integer.toString(newSize)).get(); + if (response.isTimedOut()) { + logger.warn("failed to wait for a cluster of size [{}], got [{}]", newSize, response); + throw new IllegalStateException("cluster failed to reach the expected size of [" + newSize + "]"); + } } - logger.debug("Cluster is consistent again - nodes: [{}] nextNodeId: [{}] numSharedNodes: [{}]", nodes.keySet(), nextNodeId.get(), sharedNodesSeeds.length); + logger.debug("Cluster is consistent again - nodes: [{}] nextNodeId: [{}] numSharedNodes: [{}]", nodes.keySet(), nextNodeId.get(), newSize); } @Override public synchronized void afterTest() throws IOException { - wipeDataDirectories(); + wipePendingDataDirectories(); randomlyResetClients(); /* reset all clients - each test gets its own client based on the Random instance created above. 
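       A minimal sketch of the data-path bookkeeping relied on just above
       (method names are the ones defined later in this class):

         nodeAndClient.closeNode();       // marks that node's data paths as pending for wipe
         wipePendingDataDirectories();    // called above, deletes all still-pending paths
         startNode(Settings.EMPTY);       // a freshly started node marks its paths as not eligible for wipe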
*/ } @@ -1105,7 +1117,8 @@ public final class InternalTestCluster extends TestCluster { } } - private void wipeDataDirectories() { + private void wipePendingDataDirectories() { + assert Thread.holdsLock(this); if (!dataDirToClean.isEmpty()) { try { for (Path path : dataDirToClean) { @@ -1122,6 +1135,22 @@ public final class InternalTestCluster extends TestCluster { } } + private void markNodeDataDirsAsPendingForWipe(Node node) { + assert Thread.holdsLock(this); + NodeEnvironment nodeEnv = node.getNodeEnvironment(); + if (nodeEnv.hasNodeFile()) { + dataDirToClean.addAll(Arrays.asList(nodeEnv.nodeDataPaths())); + } + } + + private void markNodeDataDirsAsNotEligableForWipe(Node node) { + assert Thread.holdsLock(this); + NodeEnvironment nodeEnv = node.getNodeEnvironment(); + if (nodeEnv.hasNodeFile()) { + dataDirToClean.removeAll(Arrays.asList(nodeEnv.nodeDataPaths())); + } + } + /** * Returns a reference to a random node's {@link ClusterService} */ @@ -1261,7 +1290,7 @@ public final class InternalTestCluster extends TestCluster { /** * Stops any of the current nodes but not the master node. */ - public void stopRandomNonMasterNode() throws IOException { + public synchronized void stopRandomNonMasterNode() throws IOException { NodeAndClient nodeAndClient = getRandomNodeAndClient(new MasterNodePredicate(getMasterName()).negate()); if (nodeAndClient != null) { logger.info("Closing random non master node [{}] current master [{}] ", nodeAndClient.name, getMasterName()); @@ -1302,28 +1331,28 @@ public final class InternalTestCluster extends TestCluster { /** * Restarts a random node in the cluster and calls the callback during restart. */ - private void restartRandomNode(Predicate predicate, RestartCallback callback) throws Exception { + private synchronized void restartRandomNode(Predicate predicate, RestartCallback callback) throws Exception { ensureOpen(); NodeAndClient nodeAndClient = getRandomNodeAndClient(predicate); if (nodeAndClient != null) { logger.info("Restarting random node [{}] ", nodeAndClient.name); - nodeAndClient.restart(callback); + nodeAndClient.restart(callback, true); } } /** * Restarts a node and calls the callback during restart. 
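     * <p>
     * A minimal usage sketch; it assumes only the {@link RestartCallback} hooks used by
     * {@code restart} above, and the node name is illustrative:
     * <pre>{@code
     * // "node_s0" is a hypothetical node name; cluster is this InternalTestCluster
     * cluster.restartNode("node_s0", new RestartCallback() {
     *     @Override
     *     public boolean clearData(String nodeName) {
     *         return true; // the node's data paths are wiped while it is down
     *     }
     * });
     * }</pre>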
*/ - public void restartNode(String nodeName, RestartCallback callback) throws Exception { + public synchronized void restartNode(String nodeName, RestartCallback callback) throws Exception { ensureOpen(); NodeAndClient nodeAndClient = nodes.get(nodeName); if (nodeAndClient != null) { logger.info("Restarting node [{}] ", nodeAndClient.name); - nodeAndClient.restart(callback); + nodeAndClient.restart(callback, true); } } - private void restartAllNodes(boolean rollingRestart, RestartCallback callback) throws Exception { + private synchronized void restartAllNodes(boolean rollingRestart, RestartCallback callback) throws Exception { ensureOpen(); List toRemove = new ArrayList<>(); try { @@ -1351,13 +1380,15 @@ public final class InternalTestCluster extends TestCluster { if (activeDisruptionScheme != null) { activeDisruptionScheme.removeFromNode(nodeAndClient.name, this); } - nodeAndClient.restart(callback); + nodeAndClient.restart(callback, true); if (activeDisruptionScheme != null) { activeDisruptionScheme.applyToNode(nodeAndClient.name, this); } } } else { int numNodesRestarted = 0; + Set[] nodesRoleOrder = new Set[nextNodeId.get()]; + Map, List> nodesByRoles = new HashMap<>(); for (NodeAndClient nodeAndClient : nodes.values()) { callback.doAfterNodes(numNodesRestarted++, nodeAndClient.nodeClient()); logger.info("Stopping node [{}] ", nodeAndClient.name); @@ -1365,25 +1396,36 @@ public final class InternalTestCluster extends TestCluster { activeDisruptionScheme.removeFromNode(nodeAndClient.name, this); } nodeAndClient.closeNode(); + // delete data folders now, before we start other nodes that may claim it + nodeAndClient.clearDataIfNeeded(callback); + + DiscoveryNode discoveryNode = getInstanceFromNode(ClusterService.class, nodeAndClient.node()).localNode(); + nodesRoleOrder[nodeAndClient.nodeAndClientId()] = discoveryNode.getRoles(); + nodesByRoles.computeIfAbsent(discoveryNode.getRoles(), k -> new ArrayList<>()).add(nodeAndClient); } - // starting master nodes first, for now so restart will be quick. 
If we'll start - // the data nodes first, they will wait for 30s for a master - List discoveryNodes = new ArrayList<>(); - for (ClusterService clusterService : getInstances(ClusterService.class)) { - discoveryNodes.add(clusterService.localNode()); + assert nodesByRoles.values().stream().collect(Collectors.summingInt(List::size)) == nodes.size(); + + // randomize startup order, while making sure that: + // 1) A data folder that was assigned to a data node will stay so + // 2) Data nodes will get the same node lock ordinal range, so custom index paths (where the ordinal is used) + // will still belong to data nodes + for (List sameRoleNodes : nodesByRoles.values()) { + Collections.shuffle(sameRoleNodes, random); } - discoveryNodes.sort((n1, n2) -> Boolean.compare(n1.isMasterNode() == false, n2.isMasterNode() == false)); - - - for (DiscoveryNode node : discoveryNodes) { - NodeAndClient nodeAndClient = nodes.get(node.getName()); + for (Set roles : nodesRoleOrder) { + if (roles == null) { + // if some nodes were stopped, we won't have a role for them + continue; + } + NodeAndClient nodeAndClient = nodesByRoles.get(roles).remove(0); logger.info("Starting node [{}] ", nodeAndClient.name); if (activeDisruptionScheme != null) { activeDisruptionScheme.removeFromNode(nodeAndClient.name, this); } - nodeAndClient.restart(callback); + // we already cleared data folders before starting nodes up + nodeAndClient.restart(callback, false); if (activeDisruptionScheme != null) { activeDisruptionScheme.applyToNode(nodeAndClient.name, this); } @@ -1444,7 +1486,7 @@ public final class InternalTestCluster extends TestCluster { Client client = viaNode != null ? client(viaNode) : client(); ClusterState state = client.admin().cluster().prepareState().execute().actionGet().getState(); return state.nodes().getMasterNode().getName(); - } catch (Throwable e) { + } catch (Exception e) { logger.warn("Can't fetch cluster state", e); throw new RuntimeException("Can't get master node " + e.getMessage(), e); } @@ -1494,36 +1536,22 @@ public final class InternalTestCluster extends TestCluster { * Starts a node with default settings and returns its name. */ public synchronized String startNode() { - return startNode(Settings.EMPTY, Version.CURRENT); - } - - /** - * Starts a node with default settings and the specified version and returns its name. - */ - public synchronized String startNode(Version version) { - return startNode(Settings.EMPTY, version); + return startNode(Settings.EMPTY); } /** * Starts a node with the given settings builder and returns its name. */ public synchronized String startNode(Settings.Builder settings) { - return startNode(settings.build(), Version.CURRENT); + return startNode(settings.build()); } /** * Starts a node with the given settings and returns its name. */ public synchronized String startNode(Settings settings) { - return startNode(settings, Version.CURRENT); - } - - /** - * Starts a node with the given settings and version and returns its name. 
- */ - public synchronized String startNode(Settings settings, Version version) { - NodeAndClient buildNode = buildNode(settings, version); - buildNode.node().start(); + NodeAndClient buildNode = buildNode(settings); + buildNode.startNode(); publishNode(buildNode); return buildNode.name; } @@ -1557,7 +1585,7 @@ public final class InternalTestCluster extends TestCluster { public synchronized String startMasterOnlyNode(Settings settings) { Settings settings1 = Settings.builder().put(settings).put(Node.NODE_MASTER_SETTING.getKey(), true).put(Node.NODE_DATA_SETTING.getKey(), false).build(); - return startNode(settings1, Version.CURRENT); + return startNode(settings1); } public synchronized Async startDataOnlyNodeAsync() { @@ -1571,7 +1599,7 @@ public final class InternalTestCluster extends TestCluster { public synchronized String startDataOnlyNode(Settings settings) { Settings settings1 = Settings.builder().put(settings).put(Node.NODE_MASTER_SETTING.getKey(), false).put(Node.NODE_DATA_SETTING.getKey(), true).build(); - return startNode(settings1, Version.CURRENT); + return startNode(settings1); } /** @@ -1592,9 +1620,9 @@ public final class InternalTestCluster extends TestCluster { * Starts a node in an async manner with the given settings and version and returns future with its name. */ public synchronized Async startNodeAsync(final Settings settings, final Version version) { - final NodeAndClient buildNode = buildNode(settings, version); + final NodeAndClient buildNode = buildNode(settings); final Future submit = executor.submit(() -> { - buildNode.node().start(); + buildNode.startNode(); publishNode(buildNode); return buildNode.name; }); @@ -1653,10 +1681,6 @@ public final class InternalTestCluster extends TestCluster { private synchronized void publishNode(NodeAndClient nodeAndClient) { assert !nodeAndClient.node().isClosed(); - NodeEnvironment nodeEnv = getInstanceFromNode(NodeEnvironment.class, nodeAndClient.node); - if (nodeEnv.hasNodeFile()) { - dataDirToClean.addAll(Arrays.asList(nodeEnv.nodeDataPaths())); - } nodes.put(nodeAndClient.name, nodeAndClient); applyDisruptionSchemeToNode(nodeAndClient); } @@ -1934,7 +1958,8 @@ public final class InternalTestCluster extends TestCluster { public void assertAfterTest() throws IOException { super.assertAfterTest(); assertRequestsFinished(); - for (NodeEnvironment env : this.getInstances(NodeEnvironment.class)) { + for (NodeAndClient nodeAndClient : nodes.values()) { + NodeEnvironment env = nodeAndClient.node().getNodeEnvironment(); Set shardIds = env.lockedShards(); for (ShardId id : shardIds) { try { diff --git a/test/framework/src/main/java/org/elasticsearch/test/StreamsUtils.java b/test/framework/src/main/java/org/elasticsearch/test/StreamsUtils.java index 10469286e1a..1d0eaa7ce51 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/StreamsUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/StreamsUtils.java @@ -19,6 +19,7 @@ package org.elasticsearch.test; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -53,7 +54,7 @@ public class StreamsUtils { } try (BytesStreamOutput out = new BytesStreamOutput()) { Streams.copy(is, out); - return out.bytes().toBytes(); + return BytesReference.toBytes(out.bytes()); } } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index 
8122abe483d..97240bd9e5e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -28,6 +28,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.Counter; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.analysis.AnalysisService; @@ -311,12 +312,12 @@ public class TestSearchContext extends SearchContext { } @Override - public long timeoutInMillis() { - return 0; + public TimeValue timeout() { + return TimeValue.ZERO; } @Override - public void timeoutInMillis(long timeoutInMillis) { + public void timeout(TimeValue timeout) { } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java index 5525baf4206..d09c763322c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java @@ -44,7 +44,7 @@ public class VersionUtils { try { Version object = (Version) field.get(null); ids.add(object.id); - } catch (Throwable e) { + } catch (IllegalAccessException e) { throw new RuntimeException(e); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java b/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java index 6ff45608700..1d91b0980e4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java @@ -50,8 +50,8 @@ public class NoOpClient extends AbstractClient { public void close() { try { ThreadPool.terminate(threadPool(), 10, TimeUnit.SECONDS); - } catch (Throwable t) { - throw new ElasticsearchException(t.getMessage(), t); + } catch (Exception e) { + throw new ElasticsearchException(e.getMessage(), e); } } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java index 7c832118ca0..48b726dc3cc 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java +++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java @@ -126,7 +126,7 @@ public class ClusterDiscoveryConfiguration extends NodeConfigurationSource { } @SuppressForbidden(reason = "we know we pass a IP address") - protected synchronized static int[] unicastHostPorts(int numHosts) { + protected static synchronized int[] unicastHostPorts(int numHosts) { int[] unicastHostPorts = new int[numHosts]; final int basePort = calcBasePort(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java index cbcb9766943..956088f0fd1 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java @@ -76,8 +76,8 @@ public class BlockClusterStateProcessing extends SingleNodeDisruption { } @Override - public void 
onFailure(String source, Throwable t) { - logger.error("unexpected error during disruption", t); + public void onFailure(String source, Exception e) { + logger.error("unexpected error during disruption", e); } }); try { diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/BridgePartition.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/BridgePartition.java new file mode 100644 index 00000000000..1a9c2b686c3 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/BridgePartition.java @@ -0,0 +1,74 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.test.disruption; + +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.test.InternalTestCluster; +import org.elasticsearch.test.transport.MockTransportService; + +import java.util.Random; + +import static org.elasticsearch.test.ESTestCase.randomFrom; + +/** + * A partition that breaks the cluster into two groups of nodes. The two groups are fully isolated + * with the exception of a single node that can see and be seen by all nodes in both groups. 
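+ * <p>
+ * A minimal usage sketch (assumes a test with access to an {@link InternalTestCluster}
+ * and the usual {@code random()} / disruption-scheme helpers):
+ * <pre>{@code
+ * BridgePartition partition = new BridgePartition(random(), true); // true = unresponsive rules
+ * internalCluster().setDisruptionScheme(partition);
+ * partition.startDisrupting(); // the two halves are isolated, bridged by one random node
+ * }</pre>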
+ */ +public class BridgePartition extends NetworkPartition { + + String bridgeNode; + final boolean unresponsive; + + public BridgePartition(Random random, boolean unresponsive) { + super(random); + this.unresponsive = unresponsive; + } + + @Override + public void applyToCluster(InternalTestCluster cluster) { + bridgeNode = randomFrom(random, cluster.getNodeNames()); + this.cluster = cluster; + for (String node: cluster.getNodeNames()) { + if (node.equals(bridgeNode) == false) { + super.applyToNode(node, cluster); + } + } + } + + @Override + public TimeValue expectedTimeToHeal() { + return TimeValue.timeValueSeconds(0); + } + + @Override + void applyDisruption(MockTransportService transportService1, MockTransportService transportService2) { + if (unresponsive) { + transportService1.addUnresponsiveRule(transportService2); + transportService2.addUnresponsiveRule(transportService1); + } else { + transportService1.addFailToSendNoConnectRule(transportService2); + transportService2.addFailToSendNoConnectRule(transportService1); + } + } + + @Override + protected String getPartitionDescription() { + return "bridge (super connected node: [" + bridgeNode + "], unresponsive [" + unresponsive + "])"; + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java index d957220c6df..caab35e4b42 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java @@ -61,7 +61,7 @@ public class IntermittentLongGCDisruption extends LongGCDisruption { this.delayDurationMax = delayDurationMax; } - final static AtomicInteger thread_ids = new AtomicInteger(); + static final AtomicInteger thread_ids = new AtomicInteger(); @Override public void startDisrupting() { diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java index 591540e72b9..b210a20cf70 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java @@ -33,7 +33,7 @@ import java.util.regex.Pattern; */ public class LongGCDisruption extends SingleNodeDisruption { - private final static Pattern[] unsafeClasses = new Pattern[]{ + private static final Pattern[] unsafeClasses = new Pattern[]{ // logging has shared JVM locks - we may suspend a thread and block other nodes from doing their thing Pattern.compile("Logger") }; diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java index be0b69a8e8b..f69c0a3085d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java @@ -124,7 +124,7 @@ public class SlowClusterStateProcessing extends SingleNodeDisruption { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { countDownLatch.countDown(); } }); diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java 
b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java index bf32b6b8575..304e3047496 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java @@ -172,7 +172,7 @@ public final class MockEngineSupport { return reader; } - public static abstract class DirectoryReaderWrapper extends FilterDirectoryReader { + public abstract static class DirectoryReaderWrapper extends FilterDirectoryReader { protected final SubReaderWrapper subReaderWrapper; public DirectoryReaderWrapper(DirectoryReader in, SubReaderWrapper subReaderWrapper) throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java b/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java index 73281b3f6ea..37ed43b9450 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java @@ -62,18 +62,18 @@ public class ThrowingLeafReaderWrapper extends FilterLeafReader { * A callback interface that allows to throw certain exceptions for * methods called on the IndexReader that is wrapped by {@link ThrowingLeafReaderWrapper} */ - public static interface Thrower { + public interface Thrower { /** * Maybe throws an exception ;) */ - public void maybeThrow(Flags flag) throws IOException; + void maybeThrow(Flags flag) throws IOException; /** * If this method returns true the {@link Terms} instance for the given field * is wrapped with Thrower support otherwise no exception will be thrown for * the current {@link Terms} instance or any other instance obtained from it. 
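     * <p>
     * A minimal sketch of a {@code Thrower} (assumes the {@code Flags.Terms} constant
     * defined in this class; the field name is illustrative):
     * <pre>{@code
     * Thrower thrower = new Thrower() {
     *     @Override
     *     public void maybeThrow(Flags flag) throws IOException {
     *         if (flag == Flags.Terms) {
     *             throw new IOException("simulated terms failure");
     *         }
     *     }
     *     @Override
     *     public boolean wrapTerms(String field) {
     *         return "body".equals(field); // only wrap terms of the "body" field
     *     }
     * };
     * }</pre>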
*/ - public boolean wrapTerms(String field); + boolean wrapTerms(String field); } public ThrowingLeafReaderWrapper(LeafReader in, Thrower thrower) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index 51d15e019a4..24af2b72bd1 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -95,9 +95,6 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -/** - * - */ public class ElasticsearchAssertions { public static void assertAcked(AcknowledgedRequestBuilder builder) { @@ -555,7 +552,6 @@ public class ElasticsearchAssertions { extraInfo += " with status [" + status + "]"; } - try { future.actionGet(); fail = true; @@ -565,7 +561,7 @@ public class ElasticsearchAssertions { if (status != null) { assertThat(extraInfo, ExceptionsHelper.status(esException), equalTo(status)); } - } catch (Throwable e) { + } catch (Exception e) { assertThat(extraInfo, e, instanceOf(exceptionClass)); if (status != null) { assertThat(extraInfo, ExceptionsHelper.status(e), equalTo(status)); @@ -597,7 +593,7 @@ public class ElasticsearchAssertions { try { future.actionGet(); fail = true; - } catch (Throwable e) { + } catch (Exception e) { assertThat(extraInfo, ExceptionsHelper.status(e), equalTo(status)); } // has to be outside catch clause to get a proper message @@ -631,7 +627,7 @@ public class ElasticsearchAssertions { registry = ESIntegTestCase.internalCluster().getInstance(NamedWriteableRegistry.class); } else { registry = new NamedWriteableRegistry(); - new SearchModule(Settings.EMPTY, registry); + new SearchModule(Settings.EMPTY, registry, false); } assertVersionSerializable(version, streamable, registry); } @@ -647,7 +643,7 @@ public class ElasticsearchAssertions { ((ActionRequest) streamable).validate(); } BytesReference orig = serialize(version, streamable); - StreamInput input = StreamInput.wrap(orig); + StreamInput input = orig.streamInput(); if (namedWriteableRegistry != null) { input = new NamedWriteableAwareStreamInput(input, namedWriteableRegistry); } @@ -657,35 +653,38 @@ public class ElasticsearchAssertions { equalTo(0)); assertThat("Serialization failed with version [" + version + "] bytes should be equal for streamable [" + streamable + "]", serialize(version, streamable), equalTo(orig)); - } catch (Throwable ex) { + } catch (Exception ex) { throw new RuntimeException("failed to check serialization - version [" + version + "] for streamable [" + streamable + "]", ex); } } - public static void assertVersionSerializable(Version version, final Throwable t) { - ElasticsearchAssertions.assertVersionSerializable(version, new ThrowableWrapper(t)); + public static void assertVersionSerializable(Version version, final Exception e) { + ElasticsearchAssertions.assertVersionSerializable(version, new ExceptionWrapper(e)); } - public static final class ThrowableWrapper implements Streamable { - Throwable throwable; - public ThrowableWrapper(Throwable t) { - throwable = t; + public static final class ExceptionWrapper implements Streamable { + + private Exception exception; + + public ExceptionWrapper(Exception e) { + exception = e; } - public ThrowableWrapper() { - throwable = null; + public ExceptionWrapper() { + exception = null; } @Override public 
void readFrom(StreamInput in) throws IOException { - throwable = in.readThrowable(); + exception = in.readException(); } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeThrowable(throwable); + out.writeException(exception); } + } @@ -697,7 +696,7 @@ public class ElasticsearchAssertions { assertThat(constructor, Matchers.notNullValue()); Streamable newInstance = constructor.newInstance(); return newInstance; - } catch (Throwable e) { + } catch (Exception e) { return null; } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 12e46087b6c..e8895aa90db 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -359,6 +359,9 @@ public abstract class ESRestTestCase extends ESTestCase { //skip test if the whole suite (yaml file) is disabled assumeFalse(buildSkipMessage(testCandidate.getSuitePath(), testCandidate.getSetupSection().getSkipSection()), testCandidate.getSetupSection().getSkipSection().skip(restTestExecutionContext.esVersion())); + //skip test if the teardown section is disabled + assumeFalse(buildSkipMessage(testCandidate.getSuitePath(), testCandidate.getTeardownSection().getSkipSection()), + testCandidate.getTeardownSection().getSkipSection().skip(restTestExecutionContext.esVersion())); //skip test if test section is disabled assumeFalse(buildSkipMessage(testCandidate.getTestPath(), testCandidate.getTestSection().getSkipSection()), testCandidate.getTestSection().getSkipSection().skip(restTestExecutionContext.esVersion())); @@ -391,8 +394,16 @@ public abstract class ESRestTestCase extends ESTestCase { restTestExecutionContext.clear(); - for (ExecutableSection executableSection : testCandidate.getTestSection().getExecutableSections()) { - executableSection.execute(restTestExecutionContext); + try { + for (ExecutableSection executableSection : testCandidate.getTestSection().getExecutableSections()) { + executableSection.execute(restTestExecutionContext); + } + } finally { + logger.debug("start teardown test [{}]", testCandidate.getTestPath()); + for (DoSection doSection : testCandidate.getTeardownSection().getDoSections()) { + doSection.execute(restTestExecutionContext); + } + logger.debug("end teardown test [{}]", testCandidate.getTestPath()); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/json/JsonPath.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ObjectPath.java similarity index 78% rename from test/framework/src/main/java/org/elasticsearch/test/rest/json/JsonPath.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/ObjectPath.java index b338d76d985..8c492d279b0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/json/JsonPath.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ObjectPath.java @@ -16,11 +16,10 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.test.rest.json; +package org.elasticsearch.test.rest; +import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.test.rest.Stash; import java.io.IOException; import java.util.ArrayList; @@ -28,22 +27,23 @@ import java.util.List; import java.util.Map; /** - * Holds a json object and allows to extract specific values from it + * Holds an object and allows to extract specific values from it given their path */ -public class JsonPath { +public class ObjectPath { - final String json; - final Map jsonMap; + private final Object object; - public JsonPath(String json) throws IOException { - this.json = json; - this.jsonMap = convertToMap(json); + public static ObjectPath createFromXContent(XContent xContent, String input) throws IOException { + try (XContentParser parser = xContent.createParser(input)) { + if (parser.nextToken() == XContentParser.Token.START_ARRAY) { + return new ObjectPath(parser.listOrderedMap()); + } + return new ObjectPath(parser.mapOrdered()); + } } - private static Map convertToMap(String json) throws IOException { - try (XContentParser parser = JsonXContent.jsonXContent.createParser(json)) { - return parser.mapOrdered(); - } + public ObjectPath(Object object) { + this.object = object; } /** @@ -58,7 +58,7 @@ public class JsonPath { */ public Object evaluate(String path, Stash stash) throws IOException { String[] parts = parsePath(path); - Object object = jsonMap; + Object object = this.object; for (String part : parts) { object = evaluate(part, object, stash); if (object == null) { @@ -70,8 +70,8 @@ public class JsonPath { @SuppressWarnings("unchecked") private Object evaluate(String key, Object object, Stash stash) throws IOException { - if (stash.isStashedValue(key)) { - key = stash.unstashValue(key).toString(); + if (stash.containsStashedValue(key)) { + key = stash.getValue(key).toString(); } if (object instanceof Map) { @@ -84,7 +84,8 @@ public class JsonPath { } catch (NumberFormatException e) { throw new IllegalArgumentException("element was a list, but [" + key + "] was not numeric", e); } catch (IndexOutOfBoundsException e) { - throw new IllegalArgumentException("element was a list with " + list.size() + " elements, but [" + key + "] was out of bounds", e); + throw new IllegalArgumentException("element was a list with " + list.size() + + " elements, but [" + key + "] was out of bounds", e); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestCandidate.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestCandidate.java index e454c396a3d..57c7e1b1305 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestCandidate.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestCandidate.java @@ -20,6 +20,7 @@ package org.elasticsearch.test.rest; import org.elasticsearch.test.rest.section.RestTestSuite; import org.elasticsearch.test.rest.section.SetupSection; +import org.elasticsearch.test.rest.section.TeardownSection; import org.elasticsearch.test.rest.section.TestSection; /** @@ -56,6 +57,10 @@ public class RestTestCandidate { return restTestSuite.getSetupSection(); } + public TeardownSection getTeardownSection() { + return restTestSuite.getTeardownSection(); + } + public TestSection getTestSection() { return testSection; } diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java index 34397f03d94..d7295e1dca7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java @@ -66,8 +66,8 @@ public class RestTestExecutionContext implements Closeable { //makes a copy of the parameters before modifying them for this specific request HashMap requestParams = new HashMap<>(params); for (Map.Entry entry : requestParams.entrySet()) { - if (stash.isStashedValue(entry.getValue())) { - entry.setValue(stash.unstashValue(entry.getValue()).toString()); + if (stash.containsStashedValue(entry.getValue())) { + entry.setValue(stash.getValue(entry.getValue()).toString()); } } @@ -76,7 +76,7 @@ public class RestTestExecutionContext implements Closeable { try { response = callApiInternal(apiName, requestParams, body, headers); //we always stash the last response body - stash.stashResponse(response); + stash.stashValue("body", response.getBody()); return response; } catch(ResponseException e) { response = new RestTestResponse(e); @@ -90,12 +90,12 @@ public class RestTestExecutionContext implements Closeable { } if (bodies.size() == 1) { - return bodyAsString(stash.unstashMap(bodies.get(0))); + return bodyAsString(stash.replaceStashedValues(bodies.get(0))); } StringBuilder bodyBuilder = new StringBuilder(); for (Map body : bodies) { - bodyBuilder.append(bodyAsString(stash.unstashMap(body))).append("\n"); + bodyBuilder.append(bodyAsString(stash.replaceStashedValues(body))).append("\n"); } return bodyBuilder.toString(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/Stash.java b/test/framework/src/main/java/org/elasticsearch/test/rest/Stash.java index 885df395c2b..f687f2b39bf 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/Stash.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/Stash.java @@ -24,25 +24,27 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.test.rest.client.RestTestResponse; import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; /** * Allows to cache the last obtained test response and or part of it within variables * that can be used as input values in following requests and assertions. 
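     * <p>
     * A minimal sketch of the lookup contract implemented below (key and value are illustrative):
     * <pre>{@code
     * Stash stash = new Stash();
     * stash.stashValue("id", "abc123");
     * stash.containsStashedValue("$id");  // true
     * stash.getValue("$id");              // "abc123"
     * stash.getValue("doc-${id}");        // "doc-abc123", via the ${...} syntax
     * }</pre>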
*/ public class Stash implements ToXContent { + private static final Pattern EXTENDED_KEY = Pattern.compile("\\$\\{([^}]+)\\}"); private static final ESLogger logger = Loggers.getLogger(Stash.class); public static final Stash EMPTY = new Stash(); private final Map stash = new HashMap<>(); - private RestTestResponse response; + private final ObjectPath stashObjectPath = new ObjectPath(stash); /** * Allows to saved a specific field in the stash as key-value pair @@ -55,12 +57,6 @@ public class Stash implements ToXContent { } } - public void stashResponse(RestTestResponse response) throws IOException { - // TODO we can almost certainly save time by lazily evaluating the body - stashValue("body", response.getBody()); - this.response = response; - } - /** * Clears the previously stashed values */ @@ -69,41 +65,63 @@ public class Stash implements ToXContent { } /** - * Tells whether a particular value needs to be looked up in the stash + * Tells whether a particular key needs to be looked up in the stash based on its name. + * Returns true if the string representation of the key starts with "$", false otherwise * The stash contains fields eventually extracted from previous responses that can be reused * as arguments for following requests (e.g. scroll_id) */ - public boolean isStashedValue(Object key) { - if (key == null) { + public boolean containsStashedValue(Object key) { + if (key == null || false == key instanceof CharSequence) { return false; } String stashKey = key.toString(); - return Strings.hasLength(stashKey) && stashKey.startsWith("$"); + if (false == Strings.hasLength(stashKey)) { + return false; + } + if (stashKey.startsWith("$")) { + return true; + } + return EXTENDED_KEY.matcher(stashKey).find(); } /** - * Extracts a value from the current stash + * Retrieves a value from the current stash. * The stash contains fields eventually extracted from previous responses that can be reused * as arguments for following requests (e.g. scroll_id) */ - public Object unstashValue(String value) throws IOException { - if (value.startsWith("$body.")) { - if (response == null) { - return null; - } - return response.evaluate(value.substring("$body".length()), this); + public Object getValue(String key) throws IOException { + if (key.charAt(0) == '$' && key.charAt(1) != '{') { + return unstash(key.substring(1)); } - Object stashedValue = stash.get(value.substring(1)); + Matcher matcher = EXTENDED_KEY.matcher(key); + /* + * String*Buffer* because that is what the Matcher API takes. In modern versions of java the uncontended synchronization is very, + * very cheap so that should not be a problem. 
+ */ + StringBuffer result = new StringBuffer(key.length()); + if (false == matcher.find()) { + throw new IllegalArgumentException("Doesn't contain any stash keys [" + key + "]"); + } + do { + matcher.appendReplacement(result, Matcher.quoteReplacement(unstash(matcher.group(1)).toString())); + } while (matcher.find()); + matcher.appendTail(result); + return result.toString(); + } + + private Object unstash(String key) throws IOException { + Object stashedValue = stashObjectPath.evaluate(key); if (stashedValue == null) { - throw new IllegalArgumentException("stashed value not found for key [" + value + "]"); + throw new IllegalArgumentException("stashed value not found for key [" + key + "]"); } return stashedValue; } /** - * Recursively unstashes map values if needed + * Goes recursively against each map entry and replaces any string value starting with "$" with its + * corresponding value retrieved from the stash */ - public Map unstashMap(Map map) throws IOException { + public Map replaceStashedValues(Map map) throws IOException { Map copy = new HashMap<>(map); unstashObject(copy); return copy; @@ -115,8 +133,8 @@ public class Stash implements ToXContent { List list = (List) obj; for (int i = 0; i < list.size(); i++) { Object o = list.get(i); - if (isStashedValue(o)) { - list.set(i, unstashValue(o.toString())); + if (containsStashedValue(o)) { + list.set(i, getValue(o.toString())); } else { unstashObject(o); } @@ -125,8 +143,8 @@ public class Stash implements ToXContent { if (obj instanceof Map) { Map map = (Map) obj; for (Map.Entry entry : map.entrySet()) { - if (isStashedValue(entry.getValue())) { - entry.setValue(unstashValue(entry.getValue().toString())); + if (containsStashedValue(entry.getValue())) { + entry.setValue(getValue(entry.getValue().toString())); } else { unstashObject(entry.getValue()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponse.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponse.java index 5b5773d6fdc..4644b87b8e7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponse.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponse.java @@ -23,23 +23,24 @@ import org.apache.http.util.EntityUtils; import org.apache.lucene.util.IOUtils; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.test.rest.Stash; -import org.elasticsearch.test.rest.json.JsonPath; import java.io.IOException; import java.nio.charset.StandardCharsets; /** * Response obtained from a REST call, eagerly reads the response body into a string for later optional parsing. - * Supports parsing the response body as json when needed and returning specific values extracted from it. + * Supports parsing the response body when needed and returning specific values extracted from it. 
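+ * <p>
+ * A minimal usage sketch (the path below is illustrative):
+ * <pre>{@code
+ * RestTestResponse testResponse = new RestTestResponse(response);
+ * Object id = testResponse.evaluate("_id"); // evaluated against the parsed JSON or YAML body
+ * }</pre>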
*/ public class RestTestResponse { private final Response response; private final String body; - private JsonPath parsedResponse; + private ObjectPath parsedResponse; - public RestTestResponse(Response response) { + public RestTestResponse(Response response) throws IOException { this.response = response; if (response.getEntity() != null) { try { @@ -53,11 +54,24 @@ public class RestTestResponse { } else { this.body = null; } + parseResponseBody(); } - public RestTestResponse(ResponseException responseException) { + public RestTestResponse(ResponseException responseException) throws IOException { this.response = responseException.getResponse(); this.body = responseException.getResponseBody(); + parseResponseBody(); + } + + private void parseResponseBody() throws IOException { + if (body != null) { + String contentType = response.getHeader("Content-Type"); + XContentType xContentType = XContentType.fromMediaTypeOrFormat(contentType); + //skip parsing if we got text back (e.g. if we called _cat apis) + if (xContentType == XContentType.JSON || xContentType == XContentType.YAML) { + this.parsedResponse = ObjectPath.createFromXContent(xContentType.xContent(), body); + } + } } public int getStatusCode() { @@ -73,11 +87,7 @@ public class RestTestResponse { * Might be a string or a json object parsed as a map. */ public Object getBody() throws IOException { - if (isJson()) { - JsonPath parsedResponse = parsedResponse(); - if (parsedResponse == null) { - return null; - } + if (parsedResponse != null) { return parsedResponse.evaluate(""); } return body; @@ -95,23 +105,21 @@ public class RestTestResponse { } /** - * Parses the response body as json and extracts a specific value from it (identified by the provided path) + * Parses the response body and extracts a specific value from it (identified by the provided path) */ public Object evaluate(String path) throws IOException { return evaluate(path, Stash.EMPTY); } /** - * Parses the response body as json and extracts a specific value from it (identified by the provided path) + * Parses the response body and extracts a specific value from it (identified by the provided path) */ public Object evaluate(String path, Stash stash) throws IOException { if (response == null) { return null; } - JsonPath jsonPath = parsedResponse(); - - if (jsonPath == null) { + if (parsedResponse == null) { //special case: api that don't support body (e.g. 
exists) return true if 200, false if 404, even if no body //is_true: '' means the response had no body but the client returned true (caused by 200) //is_false: '' means the response had no body but the client returned false (caused by 404) @@ -121,21 +129,6 @@ public class RestTestResponse { return null; } - return jsonPath.evaluate(path, stash); - } - - private boolean isJson() { - String contentType = response.getHeader("Content-Type"); - return contentType != null && contentType.contains("application/json"); - } - - private JsonPath parsedResponse() throws IOException { - if (parsedResponse != null) { - return parsedResponse; - } - if (response == null || body == null) { - return null; - } - return parsedResponse = new JsonPath(body); + return parsedResponse.evaluate(path, stash); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanEqualToParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanEqualToParser.java index 68f833d35c7..7a4cd0f316a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanEqualToParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanEqualToParser.java @@ -33,7 +33,8 @@ public class GreaterThanEqualToParser implements RestTestFragmentParser stringObjectTuple = parseContext.parseTuple(); if (! (stringObjectTuple.v2() instanceof Comparable) ) { - throw new RestTestParseException("gte section can only be used with objects that support natural ordering, found " + stringObjectTuple.v2().getClass().getSimpleName()); + throw new RestTestParseException("gte section can only be used with objects that support natural ordering, found " + + stringObjectTuple.v2().getClass().getSimpleName()); } return new GreaterThanEqualToAssertion(stringObjectTuple.v1(), stringObjectTuple.v2()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanParser.java index a66122138c1..7e1ca1ece7f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanParser.java @@ -32,7 +32,8 @@ public class GreaterThanParser implements RestTestFragmentParser stringObjectTuple = parseContext.parseTuple(); if (! (stringObjectTuple.v2() instanceof Comparable) ) { - throw new RestTestParseException("gt section can only be used with objects that support natural ordering, found " + stringObjectTuple.v2().getClass().getSimpleName()); + throw new RestTestParseException("gt section can only be used with objects that support natural ordering, found " + + stringObjectTuple.v2().getClass().getSimpleName()); } return new GreaterThanAssertion(stringObjectTuple.v1(), stringObjectTuple.v2()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanOrEqualToParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanOrEqualToParser.java index f2d53d05a56..a30979c6a3c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanOrEqualToParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanOrEqualToParser.java @@ -33,7 +33,8 @@ public class LessThanOrEqualToParser implements RestTestFragmentParser stringObjectTuple = parseContext.parseTuple(); if (! 
(stringObjectTuple.v2() instanceof Comparable) ) { - throw new RestTestParseException("lte section can only be used with objects that support natural ordering, found " + stringObjectTuple.v2().getClass().getSimpleName()); + throw new RestTestParseException("lte section can only be used with objects that support natural ordering, found " + + stringObjectTuple.v2().getClass().getSimpleName()); } return new LessThanOrEqualToAssertion(stringObjectTuple.v1(), stringObjectTuple.v2()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanParser.java index 065dd19d6a1..fc31f221758 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanParser.java @@ -32,7 +32,8 @@ public class LessThanParser implements RestTestFragmentParser public LessThanAssertion parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException { Tuple stringObjectTuple = parseContext.parseTuple(); if (! (stringObjectTuple.v2() instanceof Comparable) ) { - throw new RestTestParseException("lt section can only be used with objects that support natural ordering, found " + stringObjectTuple.v2().getClass().getSimpleName()); + throw new RestTestParseException("lt section can only be used with objects that support natural ordering, found " + + stringObjectTuple.v2().getClass().getSimpleName()); } return new LessThanAssertion(stringObjectTuple.v1(), stringObjectTuple.v2()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java index 0a0c2722020..f7325443deb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java @@ -18,20 +18,21 @@ */ package org.elasticsearch.test.rest.parser; -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.test.rest.section.DoSection; import org.elasticsearch.test.rest.section.ExecutableSection; -import org.elasticsearch.test.rest.section.ResponseBodyAssertion; import org.elasticsearch.test.rest.section.SetupSection; import org.elasticsearch.test.rest.section.SkipSection; +import org.elasticsearch.test.rest.section.TeardownSection; import org.elasticsearch.test.rest.section.TestSection; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + /** * Context shared across the whole tests parse phase. * Provides shared parse methods and holds information needed to parse the test sections (e.g. 
es version) @@ -39,6 +40,7 @@ import org.elasticsearch.test.rest.section.TestSection; public class RestTestSuiteParseContext { private static final SetupSectionParser SETUP_SECTION_PARSER = new SetupSectionParser(); + private static final TeardownSectionParser TEARDOWN_SECTION_PARSER = new TeardownSectionParser(); private static final RestTestSectionParser TEST_SECTION_PARSER = new RestTestSectionParser(); private static final SkipSectionParser SKIP_SECTION_PARSER = new SkipSectionParser(); private static final DoSectionParser DO_SECTION_PARSER = new DoSectionParser(); @@ -54,7 +56,6 @@ public class RestTestSuiteParseContext { EXECUTABLE_SECTIONS_PARSERS.put("lt", new LessThanParser()); EXECUTABLE_SECTIONS_PARSERS.put("lte", new LessThanOrEqualToParser()); EXECUTABLE_SECTIONS_PARSERS.put("length", new LengthParser()); - EXECUTABLE_SECTIONS_PARSERS.put("response_body", ResponseBodyAssertion.PARSER); } private final String api; @@ -93,6 +94,19 @@ public class RestTestSuiteParseContext { return SetupSection.EMPTY; } + public TeardownSection parseTeardownSection() throws IOException, RestTestParseException { + advanceToFieldName(); + + if ("teardown".equals(parser.currentName())) { + parser.nextToken(); + TeardownSection teardownSection = TEARDOWN_SECTION_PARSER.parse(this); + parser.nextToken(); + return teardownSection; + } + + return TeardownSection.EMPTY; + } + public TestSection parseTestSection() throws IOException, RestTestParseException { return TEST_SECTION_PARSER.parse(this); } @@ -144,7 +158,8 @@ public class RestTestSuiteParseContext { token = parser.nextToken(); } if (token != XContentParser.Token.FIELD_NAME) { - throw new RestTestParseException("malformed test section: field name expected but found " + token + " at " + parser.getTokenLocation()); + throw new RestTestParseException("malformed test section: field name expected but found " + token + " at " + + parser.getTokenLocation()); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java index d3f93939c2e..f22f0109594 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java @@ -21,6 +21,7 @@ package org.elasticsearch.test.rest.parser; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.yaml.YamlXContent; import org.elasticsearch.test.rest.section.RestTestSuite; +import org.elasticsearch.test.rest.section.TeardownSection; import org.elasticsearch.test.rest.section.TestSection; import java.io.IOException; @@ -70,11 +71,13 @@ public class RestTestSuiteParser implements RestTestFragmentParser { + + @Override + public TeardownSection parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException { + XContentParser parser = parseContext.parser(); + + TeardownSection teardownSection = new TeardownSection(); + teardownSection.setSkipSection(parseContext.parseSkipSection()); + + while (parser.currentToken() != XContentParser.Token.END_ARRAY) { + parseContext.advanceToFieldName(); + if (!"do".equals(parser.currentName())) { + throw new RestTestParseException("section [" + parser.currentName() + "] not supported within teardown section"); + } + + parser.nextToken(); + teardownSection.addDoSection(parseContext.parseDoSection()); + parser.nextToken(); + } + + parser.nextToken(); + return 
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java
index d3f93939c2e..f22f0109594 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java
@@ -21,6 +21,7 @@ package org.elasticsearch.test.rest.parser;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.yaml.YamlXContent;
 import org.elasticsearch.test.rest.section.RestTestSuite;
+import org.elasticsearch.test.rest.section.TeardownSection;
 import org.elasticsearch.test.rest.section.TestSection;
 
 import java.io.IOException;
@@ -70,11 +71,13 @@ public class RestTestSuiteParser implements RestTestFragmentParser<RestTestSuite>
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/TeardownSectionParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/TeardownSectionParser.java
new file mode 100644
--- /dev/null
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/TeardownSectionParser.java
+public class TeardownSectionParser implements RestTestFragmentParser<TeardownSection> {
+
+    @Override
+    public TeardownSection parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException {
+        XContentParser parser = parseContext.parser();
+
+        TeardownSection teardownSection = new TeardownSection();
+        teardownSection.setSkipSection(parseContext.parseSkipSection());
+
+        while (parser.currentToken() != XContentParser.Token.END_ARRAY) {
+            parseContext.advanceToFieldName();
+            if (!"do".equals(parser.currentName())) {
+                throw new RestTestParseException("section [" + parser.currentName() + "] not supported within teardown section");
+            }
+
+            parser.nextToken();
+            teardownSection.addDoSection(parseContext.parseDoSection());
+            parser.nextToken();
+        }
+
+        parser.nextToken();
+        return teardownSection;
+    }
+}
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/Assertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/Assertion.java
index c420309f206..fbba9de163b 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/Assertion.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/Assertion.java
@@ -48,18 +48,18 @@ public abstract class Assertion implements ExecutableSection {
         if (expectedValue instanceof Map) {
             @SuppressWarnings("unchecked")
             Map<String, Object> map = (Map<String, Object>) expectedValue;
-            return executionContext.stash().unstashMap(map);
+            return executionContext.stash().replaceStashedValues(map);
         }
-        if (executionContext.stash().isStashedValue(expectedValue)) {
-            return executionContext.stash().unstashValue(expectedValue.toString());
+        if (executionContext.stash().containsStashedValue(expectedValue)) {
+            return executionContext.stash().getValue(expectedValue.toString());
         }
         return expectedValue;
     }
 
     protected final Object getActualValue(RestTestExecutionContext executionContext) throws IOException {
-        if (executionContext.stash().isStashedValue(field)) {
-            return executionContext.stash().unstashValue(field);
+        if (executionContext.stash().containsStashedValue(field)) {
+            return executionContext.stash().getValue(field);
         }
         return executionContext.response(field);
     }
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanAssertion.java
index ade7fbd59ca..63f69696653 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanAssertion.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanAssertion.java
@@ -43,8 +43,10 @@ public class GreaterThanAssertion extends Assertion {
     @SuppressWarnings("unchecked")
     protected void doAssert(Object actualValue, Object expectedValue) {
         logger.trace("assert that [{}] is greater than [{}] (field: [{}])", actualValue, expectedValue, getField());
-        assertThat("value of [" + getField() + "] is not comparable (got [" + safeClass(actualValue) + "])", actualValue, instanceOf(Comparable.class));
-        assertThat("expected value of [" + getField() + "] is not comparable (got [" + expectedValue.getClass() + "])", expectedValue, instanceOf(Comparable.class));
+        assertThat("value of [" + getField() + "] is not comparable (got [" + safeClass(actualValue) + "])",
+                actualValue, instanceOf(Comparable.class));
+        assertThat("expected value of [" + getField() + "] is not comparable (got [" + expectedValue.getClass() + "])",
+                expectedValue, instanceOf(Comparable.class));
         try {
             assertThat(errorMessage(), (Comparable) actualValue, greaterThan((Comparable) expectedValue));
         } catch (ClassCastException e) {
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanEqualToAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanEqualToAssertion.java
index cfdca7bc338..297eecf2d2a 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanEqualToAssertion.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanEqualToAssertion.java
@@ -43,8 +43,10 @@ public class GreaterThanEqualToAssertion extends Assertion {
     @Override
     protected void doAssert(Object actualValue, Object expectedValue) {
         logger.trace("assert that [{}] is greater than or equal to [{}] (field: [{}])", actualValue, expectedValue, getField());
-        assertThat("value of [" + getField() + "] is not comparable (got [" + safeClass(actualValue) + "])", actualValue, instanceOf(Comparable.class));
-        assertThat("expected value of [" + getField() + "] is not comparable (got [" + expectedValue.getClass() + "])", expectedValue, instanceOf(Comparable.class));
+        assertThat("value of [" + getField() + "] is not comparable (got [" + safeClass(actualValue) + "])",
+                actualValue, instanceOf(Comparable.class));
+        assertThat("expected value of [" + getField() + "] is not comparable (got [" + expectedValue.getClass() + "])",
+                expectedValue, instanceOf(Comparable.class));
         try {
             assertThat(errorMessage(), (Comparable) actualValue, greaterThanOrEqualTo((Comparable) expectedValue));
         } catch (ClassCastException e) {
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/LengthAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/LengthAssertion.java
index 265487a0388..eb28ba01a94 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/LengthAssertion.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/LengthAssertion.java
@@ -44,7 +44,8 @@ public class LengthAssertion extends Assertion {
     @Override
     protected void doAssert(Object actualValue, Object expectedValue) {
         logger.trace("assert that [{}] has length [{}] (field: [{}])", actualValue, expectedValue, getField());
-        assertThat("expected value of [" + getField() + "] is not numeric (got [" + expectedValue.getClass() + "]", expectedValue, instanceOf(Number.class));
+        assertThat("expected value of [" + getField() + "] is not numeric (got [" + expectedValue.getClass() + "]",
+                expectedValue, instanceOf(Number.class));
         int length = ((Number) expectedValue).intValue();
         if (actualValue instanceof String) {
             assertThat(errorMessage(), ((String) actualValue).length(), equalTo(length));
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanAssertion.java
index 89387ff8952..153a7824569 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanAssertion.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanAssertion.java
@@ -44,8 +44,10 @@ public class LessThanAssertion extends Assertion {
     @SuppressWarnings("unchecked")
     protected void doAssert(Object actualValue, Object expectedValue) {
         logger.trace("assert that [{}] is less than [{}] (field: [{}])", actualValue, expectedValue, getField());
-        assertThat("value of [" + getField() + "] is not comparable (got [" + safeClass(actualValue) + "])", actualValue, instanceOf(Comparable.class));
-        assertThat("expected value of [" + getField() + "] is not comparable (got [" + expectedValue.getClass() + "])", expectedValue, instanceOf(Comparable.class));
+        assertThat("value of [" + getField() + "] is not comparable (got [" + safeClass(actualValue) + "])",
+                actualValue, instanceOf(Comparable.class));
+        assertThat("expected value of [" + getField() + "] is not comparable (got [" + expectedValue.getClass() + "])",
+                expectedValue, instanceOf(Comparable.class));
         try {
             assertThat(errorMessage(), (Comparable) actualValue, lessThan((Comparable) expectedValue));
         } catch (ClassCastException e) {
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanOrEqualToAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanOrEqualToAssertion.java
index 99cbf1155d5..1eb3a9fc2b2 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanOrEqualToAssertion.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanOrEqualToAssertion.java
@@ -43,8 +43,10 @@ public class LessThanOrEqualToAssertion extends Assertion {
     @Override
     protected void doAssert(Object actualValue, Object expectedValue) {
         logger.trace("assert that [{}] is less than or equal to [{}] (field: [{}])", actualValue, expectedValue, getField());
-        assertThat("value of [" + getField() + "] is not comparable (got [" + safeClass(actualValue) + "])", actualValue, instanceOf(Comparable.class));
-        assertThat("expected value of [" + getField() + "] is not comparable (got [" + expectedValue.getClass() + "])", expectedValue, instanceOf(Comparable.class));
+        assertThat("value of [" + getField() + "] is not comparable (got [" + safeClass(actualValue) + "])",
+                actualValue, instanceOf(Comparable.class));
+        assertThat("expected value of [" + getField() + "] is not comparable (got [" + expectedValue.getClass() + "])",
+                expectedValue, instanceOf(Comparable.class));
         try {
             assertThat(errorMessage(), (Comparable) actualValue, lessThanOrEqualTo((Comparable) expectedValue));
         } catch (ClassCastException e) {
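The Assertion changes above switch to the renamed Stash API: containsStashedValue/getValue replace isStashedValue/unstashValue, and replaceStashedValues replaces unstashMap. A rough usage sketch (the stashed key and value are invented for illustration):

    Stash stash = new Stash();
    stash.stashValue("id", "doc-1");
    stash.containsStashedValue("$id");        // true: "$id" is a stash reference
    Object resolved = stash.getValue("$id");  // resolves to "doc-1"
    // replaceStashedValues(map) walks a whole map and expands every stash reference it finds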
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java
index e00fbbea01c..3a96d4532a0 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java
@@ -18,15 +18,21 @@
  */
 package org.elasticsearch.test.rest.section;
 
+import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
 
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Objects;
+import java.util.TreeMap;
 import java.util.regex.Pattern;
 
 import static org.elasticsearch.test.hamcrest.RegexMatcher.matches;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
-import static org.hamcrest.Matchers.notNullValue;
+import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertThat;
 
 /**
@@ -45,12 +51,12 @@ public class MatchAssertion extends Assertion {
 
     @Override
     protected void doAssert(Object actualValue, Object expectedValue) {
-        //if the value is wrapped into / it is a regexp (e.g. /s+\d+/)
         if (expectedValue instanceof String) {
             String expValue = ((String) expectedValue).trim();
             if (expValue.length() > 2 && expValue.startsWith("/") && expValue.endsWith("/")) {
-                assertThat("field [" + getField() + "] was expected to be of type String but is an instanceof [" + safeClass(actualValue) + "]", actualValue, instanceOf(String.class));
+                assertThat("field [" + getField() + "] was expected to be of type String but is an instanceof [" +
+                        safeClass(actualValue) + "]", actualValue, instanceOf(String.class));
                 String stringValue = (String) actualValue;
                 String regex = expValue.substring(1, expValue.length() - 1);
                 logger.trace("assert that [{}] matches [{}]", stringValue, regex);
@@ -60,20 +66,131 @@ public class MatchAssertion extends Assertion {
             }
         }
 
-        assertThat(errorMessage(), actualValue, notNullValue());
+        assertNotNull("field [" + getField() + "] is null", actualValue);
         logger.trace("assert that [{}] matches [{}] (field [{}])", actualValue, expectedValue, getField());
-        if (!actualValue.getClass().equals(safeClass(expectedValue))) {
+        if (actualValue.getClass().equals(safeClass(expectedValue)) == false) {
             if (actualValue instanceof Number && expectedValue instanceof Number) {
                 //Double 1.0 is equal to Integer 1
-                assertThat(errorMessage(), ((Number) actualValue).doubleValue(), equalTo(((Number) expectedValue).doubleValue()));
+                assertThat("field [" + getField() + "] doesn't match the expected value",
+                        ((Number) actualValue).doubleValue(), equalTo(((Number) expectedValue).doubleValue()));
                 return;
             }
         }
 
-        assertThat(errorMessage(), actualValue, equalTo(expectedValue));
+        if (expectedValue.equals(actualValue) == false) {
+            FailureMessage message = new FailureMessage(getField());
+            message.compare(getField(), actualValue, expectedValue);
+            throw new AssertionError(message.message);
+        }
     }
 
-    private String errorMessage() {
-        return "field [" + getField() + "] doesn't match the expected value";
+    private static class FailureMessage {
+        private final StringBuilder message;
+        private int indent = 0;
+
+        private FailureMessage(String field) {
+            this.message = new StringBuilder(field + " didn't match the expected value:\n");
+        }
+
+        private void compareMaps(Map<String, Object> actual, Map<String, Object> expected) {
+            actual = new TreeMap<>(actual);
+            expected = new TreeMap<>(expected);
+            for (Map.Entry<String, Object> expectedEntry : expected.entrySet()) {
+                compare(expectedEntry.getKey(), actual.remove(expectedEntry.getKey()), expectedEntry.getValue());
+            }
+            for (Map.Entry<String, Object> unmatchedEntry : actual.entrySet()) {
+                field(unmatchedEntry.getKey(), "unexpected but found [" + unmatchedEntry.getValue() + "]");
+            }
+        }
+
+        private void compareLists(List<Object> actual, List<Object> expected) {
+            int i = 0;
+            while (i < actual.size() && i < expected.size()) {
+                compare(Integer.toString(i), actual.get(i), expected.get(i));
+                i++;
+            }
+            if (actual.size() == expected.size()) {
+                return;
+            }
+            indent();
+            if (actual.size() < expected.size()) {
+                message.append("expected [").append(expected.size() - i).append("] more entries\n");
+                return;
+            }
+            message.append("received [").append(actual.size() - i).append("] more entries than expected\n");
+        }
+
+        private void compare(String field, @Nullable Object actual, Object expected) {
+            if (expected instanceof Map) {
+                if (actual == null) {
+                    field(field, "expected map but not found");
+                    return;
+                }
+                if (false == actual instanceof Map) {
+                    field(field, "expected map but found [" + actual + "]");
+                    return;
+                }
+                @SuppressWarnings("unchecked")
+                Map<String, Object> expectedMap = (Map<String, Object>) expected;
+                @SuppressWarnings("unchecked")
+                Map<String, Object> actualMap = (Map<String, Object>) actual;
+                if (expectedMap.isEmpty() && actualMap.isEmpty()) {
+                    field(field, "same [empty map]");
+                    return;
+                }
+                field(field, null);
+                indent += 1;
+                compareMaps(actualMap, expectedMap);
+                indent -= 1;
+                return;
+            }
+            if (expected instanceof List) {
+                if (actual == null) {
+                    field(field, "expected list but not found");
+                    return;
+                }
+                if (false == actual instanceof List) {
+                    field(field, "expected list but found [" + actual + "]");
+                    return;
+                }
+                @SuppressWarnings("unchecked")
+                List<Object> expectedList = (List<Object>) expected;
+                @SuppressWarnings("unchecked")
+                List<Object> actualList = (List<Object>) actual;
+                if (expectedList.isEmpty() && actualList.isEmpty()) {
+                    field(field, "same [empty list]");
+                    return;
+                }
+                field(field, null);
+                indent += 1;
+                compareLists(actualList, expectedList);
+                indent -= 1;
+                return;
+            }
+            if (actual == null) {
+                field(field, "expected [" + expected + "] but not found");
+                return;
+            }
+            if (Objects.equals(expected, actual)) {
+                field(field, "same [" + expected + "]");
+                return;
+            }
+            field(field, "expected [" + expected + "] but was [" + actual + "]");
+        }
+
+        private void indent() {
+            for (int i = 0; i < indent; i++) {
+                message.append("  ");
+            }
+        }
+
+        private void field(Object name, String info) {
+            indent();
+            message.append(String.format(Locale.ROOT, "%30s: ", name));
+            if (info != null) {
+                message.append(info);
+            }
+            message.append('\n');
+        }
     }
 }
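The inlined FailureMessage walks expected and actual structures side by side and renders one line per field, so a mismatch now points at where the two trees diverge instead of dumping both values whole. A sketch of the kind of output it produces (the field names, values, and the MatchAssertion constructor call are illustrative assumptions):

    Map<String, Object> expected = Collections.singletonMap("took", 5);
    Map<String, Object> actual = Collections.singletonMap("took", 7);
    // Executing a match assertion built from these, e.g. new MatchAssertion("$body", expected),
    // against a response containing `actual` fails with a message along the lines of:
    //   $body didn't match the expected value:
    //                             took: expected [5] but was [7]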
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/ResponseBodyAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/ResponseBodyAssertion.java
deleted file mode 100644
index 3ead65a2111..00000000000
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/ResponseBodyAssertion.java
+++ /dev/null
@@ -1,170 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.test.rest.section;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Objects;
-import java.util.TreeMap;
-
-import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.json.JsonXContent;
-import org.elasticsearch.test.rest.parser.RestTestFragmentParser;
-import org.elasticsearch.test.rest.parser.RestTestParseException;
-import org.elasticsearch.test.rest.parser.RestTestSuiteParseContext;
-
-/**
- * Checks that the response body matches some text.
- */
-public class ResponseBodyAssertion extends Assertion {
-    public static final RestTestFragmentParser<ResponseBodyAssertion> PARSER = new RestTestFragmentParser<ResponseBodyAssertion>() {
-        @Override
-        public ResponseBodyAssertion parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException {
-            try (XContentParser parser = JsonXContent.jsonXContent.createParser(parseContext.parseField())) {
-                return new ResponseBodyAssertion("$body", parser.map());
-            }
-        }
-    };
-
-    private ResponseBodyAssertion(String field, Map<String, Object> expectedValue) {
-        super(field, expectedValue);
-    }
-
-    @Override
-    protected void doAssert(Object actualValue, Object expectedValue) {
-        if (false == expectedValue.equals(actualValue)) {
-            @SuppressWarnings("unchecked")
-            Map<String, Object> actual = (Map<String, Object>) actualValue;
-            @SuppressWarnings("unchecked")
-            Map<String, Object> expected = (Map<String, Object>) expectedValue;
-            FailureMessage message = new FailureMessage();
-            message.compareMaps(actual, expected);
-            throw new AssertionError(message.message);
-        }
-    }
-
-    private class FailureMessage {
-        private final StringBuilder message = new StringBuilder("body didn't match the expected value:\n");
-        private int indent = 0;
-
-        private void compareMaps(Map<String, Object> actual, Map<String, Object> expected) {
-            actual = new TreeMap<>(actual);
-            expected = new TreeMap<>(expected);
-            for (Map.Entry<String, Object> expectedEntry : expected.entrySet()) {
-                compare(expectedEntry.getKey(), actual.remove(expectedEntry.getKey()), expectedEntry.getValue());
-            }
-            for (Map.Entry<String, Object> unmatchedEntry : actual.entrySet()) {
-                field(unmatchedEntry.getKey(), "unexpected but found [" + unmatchedEntry.getValue() + "]");
-            }
-        }
-
-        private void compareLists(List<Object> actual, List<Object> expected) {
-            int i = 0;
-            while (i < actual.size() && i < expected.size()) {
-                compare(i, actual.get(i), expected.get(i));
-                i++;
-            }
-            if (actual.size() == expected.size()) {
-                return;
-            }
-            indent();
-            if (actual.size() < expected.size()) {
-                message.append("expected [").append(expected.size() - i).append("] more entries\n");
-                return;
-            }
-            message.append("received [").append(actual.size() - i).append("] more entries than expected\n");
-        }
-
-        private void compare(Object field, @Nullable Object actual, Object expected) {
-            if (expected instanceof Map) {
-                if (actual == null) {
-                    field(field, "expected map but not found");
-                    return;
-                }
-                if (false == actual instanceof Map) {
-                    field(field, "expected map but found [" + actual + "]");
-                    return;
-                }
-                @SuppressWarnings("unchecked")
-                Map<String, Object> expectedMap = (Map<String, Object>) expected;
-                @SuppressWarnings("unchecked")
-                Map<String, Object> actualMap = (Map<String, Object>) actual;
-                if (expectedMap.isEmpty() && actualMap.isEmpty()) {
-                    field(field, "same [empty map]");
-                    return;
-                }
-                field(field, null);
-                indent += 1;
-                compareMaps(actualMap, expectedMap);
-                indent -= 1;
-                return;
-            }
-            if (expected instanceof List) {
-                if (actual == null) {
-                    field(field, "expected list but not found");
-                    return;
-                }
-                if (false == actual instanceof List) {
-                    field(field, "expected list but found [" + actual + "]");
-                    return;
-                }
-                @SuppressWarnings("unchecked")
-                List<Object> expectedList = (List<Object>) expected;
-                @SuppressWarnings("unchecked")
-                List<Object> actualList = (List<Object>) actual;
-                if (expectedList.isEmpty() && actualList.isEmpty()) {
-                    field(field, "same [empty list]");
-                    return;
-                }
-                field(field, null);
-                indent += 1;
-                compareLists(actualList, expectedList);
-                indent -= 1;
-                return;
-            }
-            if (actual == null) {
-                field(field, "expected [" + expected + "] but not found");
-                return;
-            }
-            if (Objects.equals(expected, actual)) {
-                field(field, "same [" + expected + "]");
-                return;
-            }
-            field(field, "expected [" + expected + "] but was [" + actual + "]");
-        }
-
-        private void indent() {
-            for (int i = 0; i < indent; i++) {
-                message.append("  ");
-            }
-        }
-
-        private void field(Object name, String info) {
-            indent();
-            message.append(String.format(Locale.ROOT, "%30s: ", name));
-            if (info != null) {
-                message.append(info);
-            }
-            message.append('\n');
-        }
-    }
-}
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/RestTestSuite.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/RestTestSuite.java
index d53671bc6bc..5c093be3fa0 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/RestTestSuite.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/RestTestSuite.java
@@ -33,6 +33,7 @@ public class RestTestSuite {
     private final String name;
 
     private SetupSection setupSection;
+    private TeardownSection teardownSection;
 
     private Set<TestSection> testSections = new TreeSet<>();
 
@@ -61,6 +62,14 @@ public class RestTestSuite {
         this.setupSection = setupSection;
     }
 
+    public TeardownSection getTeardownSection() {
+        return teardownSection;
+    }
+
+    public void setTeardownSection(TeardownSection teardownSection) {
+        this.teardownSection = teardownSection;
+    }
+
     /**
      * Adds a {@link org.elasticsearch.test.rest.section.TestSection} to the REST suite
     * @return true if the test section was not already present, false otherwise
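TeardownSection below reuses the EMPTY-singleton pattern of the existing setup section. Note that isEmpty() is implemented as EMPTY.equals(this); since equals() is not overridden, this is reference equality, so only the shared EMPTY instance counts as empty, and a freshly built section with no do sections does not. A minimal sketch of that behavior:

    TeardownSection section = new TeardownSection();
    section.setSkipSection(SkipSection.EMPTY);
    section.isEmpty();                 // false: not the EMPTY singleton
    TeardownSection.EMPTY.isEmpty();   // true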
diff --git a/core/src/main/java/org/elasticsearch/common/netty/ReleaseChannelFutureListener.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/TeardownSection.java
similarity index 50%
rename from core/src/main/java/org/elasticsearch/common/netty/ReleaseChannelFutureListener.java
rename to test/framework/src/main/java/org/elasticsearch/test/rest/section/TeardownSection.java
index 6f2979d13ca..b3709472be5 100644
--- a/core/src/main/java/org/elasticsearch/common/netty/ReleaseChannelFutureListener.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/TeardownSection.java
@@ -17,26 +17,40 @@
  * under the License.
  */
 
-package org.elasticsearch.common.netty;
+package org.elasticsearch.test.rest.section;
 
-import org.elasticsearch.common.lease.Releasable;
-import org.jboss.netty.channel.ChannelFuture;
-import org.jboss.netty.channel.ChannelFutureListener;
+import java.util.ArrayList;
+import java.util.List;
 
-/**
- * A channel listener that releases a {@link org.elasticsearch.common.lease.Releasable} when
- * the operation is complete.
- */
-public class ReleaseChannelFutureListener implements ChannelFutureListener {
+public class TeardownSection {
 
-    private final Releasable releasable;
+    public static final TeardownSection EMPTY;
 
-    public ReleaseChannelFutureListener(Releasable releasable) {
-        this.releasable = releasable;
+    static {
+        EMPTY = new TeardownSection();
+        EMPTY.setSkipSection(SkipSection.EMPTY);
     }
 
-    @Override
-    public void operationComplete(ChannelFuture future) throws Exception {
-        releasable.close();
+    private SkipSection skipSection;
+    private List<DoSection> doSections = new ArrayList<>();
+
+    public SkipSection getSkipSection() {
+        return skipSection;
+    }
+
+    public void setSkipSection(SkipSection skipSection) {
+        this.skipSection = skipSection;
+    }
+
+    public List<DoSection> getDoSections() {
+        return doSections;
+    }
+
+    public void addDoSection(DoSection doSection) {
+        this.doSections.add(doSection);
+    }
+
+    public boolean isEmpty() {
+        return EMPTY.equals(this);
     }
 }
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java b/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java
index 106ff5176c7..c6ea48fd6ef 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java
@@ -75,7 +75,7 @@ public class RestSpec {
                 }
                 restSpec.addApi(restApi);
             }
-        } catch (Throwable ex) {
+        } catch (Exception ex) {
             throw new IOException("Can't parse rest spec file: [" + jsonFile + "]", ex);
         }
     }
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/support/Features.java b/test/framework/src/main/java/org/elasticsearch/test/rest/support/Features.java
index 66ba1528b90..cf33570ef28 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/support/Features.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/support/Features.java
@@ -34,7 +34,7 @@ import java.util.List;
  */
 public final class Features {
 
-    private static final List<String> SUPPORTED = Arrays.asList("stash_in_path", "groovy_scripting", "headers", "yaml");
+    private static final List<String> SUPPORTED = Arrays.asList("stash_in_path", "groovy_scripting", "headers", "embedded_stash_key");
 
     private Features() {
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/support/FileUtils.java b/test/framework/src/main/java/org/elasticsearch/test/rest/support/FileUtils.java
index 69acae55fdc..b32308f8cd8 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/support/FileUtils.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/support/FileUtils.java
@@ -78,7 +78,8 @@ public final class FileUtils {
      * Each input path can either be a single file (the .yaml suffix is optional) or a directory.
      * Each path is looked up in the classpath, or optionally from {@code fileSystem} if its not null.
      */
-    public static Map<String, Set<Path>> findYamlSuites(FileSystem fileSystem, String optionalPathPrefix, final String... paths) throws IOException {
+    public static Map<String, Set<Path>> findYamlSuites(FileSystem fileSystem, String optionalPathPrefix, final String... paths)
+            throws IOException {
         Map<String, Set<Path>> yamlSuites = new HashMap<>();
         for (String path : paths) {
             collectFiles(resolveFile(fileSystem, optionalPathPrefix, path, YAML_SUFFIX), YAML_SUFFIX, yamlSuites);
@@ -86,7 +87,8 @@ public final class FileUtils {
         return yamlSuites;
     }
 
-    private static Path resolveFile(FileSystem fileSystem, String optionalPathPrefix, String path, String optionalFileSuffix) throws IOException {
+    private static Path resolveFile(FileSystem fileSystem, String optionalPathPrefix, String path, String optionalFileSuffix)
+            throws IOException {
         if (fileSystem != null) {
             Path file = findFile(fileSystem, path, optionalFileSuffix);
             if (!lenientExists(file)) {
@@ -94,7 +96,8 @@ public final class FileUtils {
                 String newPath = optionalPathPrefix + "/" + path;
                 file = findFile(fileSystem, newPath, optionalFileSuffix);
                 if (!lenientExists(file)) {
-                    throw new NoSuchFileException("path prefix: " + optionalPathPrefix + ", path: " + path + ", file suffix: " + optionalFileSuffix);
+                    throw new NoSuchFileException("path prefix: " + optionalPathPrefix + ", path: " + path + ", file suffix: "
+                            + optionalFileSuffix);
                 }
             }
             return file;
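For reference, a hedged usage sketch of the wrapped findYamlSuites signature (the suite names are placeholders; passing null for the file system falls back to classpath lookup, per the javadoc above):

    // Resolve YAML test suites from the classpath under /rest-api-spec/test.
    Map<String, Set<Path>> yamlSuites =
            FileUtils.findYamlSuites(null, "/rest-api-spec/test", "index", "get");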
diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java
index 7b1c3fd936b..1b99d2f32cb 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java
@@ -131,11 +131,11 @@ public class MockFSDirectoryService extends FsDirectoryService {
                 ESTestCase.checkIndexFailed = true;
                 logger.warn("check index [failure] index files={}\n{}",
                         Arrays.toString(dir.listAll()),
-                        new String(os.bytes().toBytes(), StandardCharsets.UTF_8));
+                        os.bytes().utf8ToString());
                 throw new IOException("index check failure");
             } else {
                 if (logger.isDebugEnabled()) {
-                    logger.debug("check index [success]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8));
+                    logger.debug("check index [success]\n{}", os.bytes().utf8ToString());
                 }
             }
         } catch (LockObtainFailedException e) {
diff --git a/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java b/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java
index b0d16d10c49..ec695e8bd41 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java
@@ -50,8 +50,8 @@ public class MockTaskManager extends TaskManager {
         for (MockTaskManagerListener listener : listeners) {
             try {
                 listener.onTaskRegistered(task);
-            } catch (Throwable t) {
-                logger.warn("failed to notify task manager listener about unregistering the task with id {}", t, task.getId());
+            } catch (Exception e) {
+                logger.warn("failed to notify task manager listener about unregistering the task with id {}", e, task.getId());
             }
         }
     }
@@ -65,8 +65,8 @@ public class MockTaskManager extends TaskManager {
             for (MockTaskManagerListener listener : listeners) {
                 try {
                     listener.onTaskUnregistered(task);
-                } catch (Throwable t) {
-                    logger.warn("failed to notify task manager listener about unregistering the task with id {}", t, task.getId());
+                } catch (Exception e) {
+                    logger.warn("failed to notify task manager listener about unregistering the task with id {}", e, task.getId());
                }
            }
        } else {
@@ -80,8 +80,8 @@ public class MockTaskManager extends TaskManager {
        for (MockTaskManagerListener listener : listeners) {
            try {
                listener.waitForTaskCompletion(task);
-            } catch (Throwable t) {
-                logger.warn("failed to notify task manager listener about waitForTaskCompletion the task with id {}", t, task.getId());
+            } catch (Exception e) {
+                logger.warn("failed to notify task manager listener about waitForTaskCompletion the task with id {}", e, task.getId());
            }
        }
        super.waitForTaskCompletion(task, untilInNanos);
diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java
index 654a1c971ca..f0f85b24f72 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java
@@ -51,11 +51,11 @@ public class CapturingTransport implements Transport {
 
     private TransportServiceAdapter adapter;
 
-    static public class CapturedRequest {
-        final public DiscoveryNode node;
-        final public long requestId;
-        final public String action;
-        final public TransportRequest request;
+    public static class CapturedRequest {
+        public final DiscoveryNode node;
+        public final long requestId;
+        public final String action;
+        public final TransportRequest request;
 
         public CapturedRequest(DiscoveryNode node, long requestId, String action, TransportRequest request) {
             this.node = node;
@@ -242,19 +242,13 @@ public class CapturingTransport implements Transport {
     }
 
     @Override
-    public Transport start() {
-        return null;
-    }
+    public void start() {}
 
     @Override
-    public Transport stop() {
-        return null;
-    }
+    public void stop() {}
 
     @Override
-    public void close() {
-
-    }
+    public void close() {}
 
     @Override
     public List<String> getLocalAddresses() {
diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java
index 7688e2842e6..bc371ca02d1 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java
@@ -20,7 +20,6 @@ package org.elasticsearch.test.transport;
 
 import org.elasticsearch.Version;
-import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.transport.TransportService;
@@ -34,7 +33,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.network.NetworkModule;
 import org.elasticsearch.common.network.NetworkService;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.settings.SettingsModule;
 import org.elasticsearch.common.transport.BoundTransportAddress;
 import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.common.unit.TimeValue;
@@ -52,7 +50,6 @@ import org.elasticsearch.transport.Transport;
 import org.elasticsearch.transport.TransportException;
 import org.elasticsearch.transport.TransportRequest;
 import org.elasticsearch.transport.TransportRequestOptions;
-import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.transport.TransportServiceAdapter;
 import org.elasticsearch.transport.local.LocalTransport;
 import org.elasticsearch.transport.netty.NettyTransport;
@@ -383,11 +380,11 @@ public class MockTransportService extends TransportService {
                 BytesStreamOutput bStream = new BytesStreamOutput();
                 request.writeTo(bStream);
                 final TransportRequest clonedRequest = reg.newRequest();
-                clonedRequest.readFrom(StreamInput.wrap(bStream.bytes()));
+                clonedRequest.readFrom(bStream.bytes().streamInput());
 
                 threadPool.schedule(delay, ThreadPool.Names.GENERIC, new AbstractRunnable() {
                     @Override
-                    public void onFailure(Throwable e) {
+                    public void onFailure(Exception e) {
                         logger.debug("failed to send delayed request", e);
                     }
@@ -558,15 +555,13 @@ public class MockTransportService extends TransportService {
         }
 
         @Override
-        public Transport start() {
+        public void start() {
             transport.start();
-            return this;
         }
 
         @Override
-        public Transport stop() {
+        public void stop() {
             transport.stop();
-            return this;
         }
 
         @Override
@@ -641,10 +636,10 @@ public class MockTransportService extends TransportService {
     }
 
     @Override
-    protected void traceResponseSent(long requestId, String action, Throwable t) {
-        super.traceResponseSent(requestId, action, t);
+    protected void traceResponseSent(long requestId, String action, Exception e) {
+        super.traceResponseSent(requestId, action, e);
         for (Tracer tracer : activeTracers) {
-            tracer.responseSent(requestId, action, t);
+            tracer.responseSent(requestId, action, e);
         }
     }
diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/FileUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/FileUtilsTests.java
index 128cb862e57..db41c42e16a 100644
--- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/FileUtilsTests.java
+++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/FileUtilsTests.java
@@ -84,7 +84,8 @@ public class FileUtilsTests extends ESTestCase {
         assertSingleFile(yamlSuites.get(dir.getFileName().toString()), dir.getFileName().toString(), file.getFileName().toString());
 
         //load from external file (optional extension)
-        yamlSuites = FileUtils.findYamlSuites(dir.getFileSystem(), "/rest-api-spec/test", dir.resolve("test_loading").toAbsolutePath().toString());
+        yamlSuites = FileUtils.findYamlSuites(dir.getFileSystem(), "/rest-api-spec/test",
+                dir.resolve("test_loading").toAbsolutePath().toString());
         assertThat(yamlSuites, notNullValue());
         assertThat(yamlSuites.size(), equalTo(1));
         assertThat(yamlSuites.containsKey(dir.getFileName().toString()), equalTo(true));
diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/JsonPathTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/JsonPathTests.java
deleted file mode 100644
index fefcd57af79..00000000000
--- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/JsonPathTests.java
+++ /dev/null
@@ -1,159 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.test.rest.test;
-
-import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.test.rest.Stash;
-import org.elasticsearch.test.rest.json.JsonPath;
-
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import static org.hamcrest.Matchers.contains;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.instanceOf;
-import static org.hamcrest.Matchers.notNullValue;
-import static org.hamcrest.Matchers.nullValue;
-
-public class JsonPathTests extends ESTestCase {
-    public void testEvaluateObjectPathEscape() throws Exception {
-        String json = "{ \"field1\": { \"field2.field3\" : \"value2\" } }";
-        JsonPath jsonPath = new JsonPath(json);
-        Object object = jsonPath.evaluate("field1.field2\\.field3");
-        assertThat(object, instanceOf(String.class));
-        assertThat((String)object, equalTo("value2"));
-    }
-
-    public void testEvaluateObjectPathWithDoubleDot() throws Exception {
-        String json = "{ \"field1\": { \"field2\" : \"value2\" } }";
-        JsonPath jsonPath = new JsonPath(json);
-        Object object = jsonPath.evaluate("field1..field2");
-        assertThat(object, instanceOf(String.class));
-        assertThat((String)object, equalTo("value2"));
-    }
-
-    public void testEvaluateObjectPathEndsWithDot() throws Exception {
-        String json = "{ \"field1\": { \"field2\" : \"value2\" } }";
-        JsonPath jsonPath = new JsonPath(json);
-        Object object = jsonPath.evaluate("field1.field2.");
-        assertThat(object, instanceOf(String.class));
-        assertThat((String)object, equalTo("value2"));
-    }
-
-    public void testEvaluateString() throws Exception {
-        String json = "{ \"field1\": { \"field2\" : \"value2\" } }";
-        JsonPath jsonPath = new JsonPath(json);
-        Object object = jsonPath.evaluate("field1.field2");
-        assertThat(object, instanceOf(String.class));
-        assertThat((String)object, equalTo("value2"));
-    }
-
-    public void testEvaluateInteger() throws Exception {
-        String json = "{ \"field1\": { \"field2\" : 333 } }";
-        JsonPath jsonPath = new JsonPath(json);
-        Object object = jsonPath.evaluate("field1.field2");
-        assertThat(object, instanceOf(Integer.class));
-        assertThat((Integer)object, equalTo(333));
-    }
-
-    public void testEvaluateDouble() throws Exception {
-        String json = "{ \"field1\": { \"field2\" : 3.55 } }";
-        JsonPath jsonPath = new JsonPath(json);
-        Object object = jsonPath.evaluate("field1.field2");
-        assertThat(object, instanceOf(Double.class));
-        assertThat((Double)object, equalTo(3.55));
-    }
-
-    public void testEvaluateArray() throws Exception {
-        String json = "{ \"field1\": { \"array1\" : [ \"value1\", \"value2\" ] } }";
-        JsonPath jsonPath = new JsonPath(json);
-        Object object = jsonPath.evaluate("field1.array1");
-        assertThat(object, instanceOf(List.class));
-        List list = (List) object;
-        assertThat(list.size(), equalTo(2));
-        assertThat(list.get(0), instanceOf(String.class));
-        assertThat((String)list.get(0), equalTo("value1"));
-        assertThat(list.get(1), instanceOf(String.class));
-        assertThat((String)list.get(1), equalTo("value2"));
-    }
-
-    public void testEvaluateArrayElement() throws Exception {
-        String json = "{ \"field1\": { \"array1\" : [ \"value1\", \"value2\" ] } }";
-        JsonPath jsonPath = new JsonPath(json);
-        Object object = jsonPath.evaluate("field1.array1.1");
-        assertThat(object, instanceOf(String.class));
-        assertThat((String)object, equalTo("value2"));
-    }
-
-    public void testEvaluateArrayElementObject() throws Exception {
-        String json = "{ \"field1\": { \"array1\" : [ {\"element\": \"value1\"}, {\"element\":\"value2\"} ] } }";
-        JsonPath jsonPath = new JsonPath(json);
-        Object object = jsonPath.evaluate("field1.array1.1.element");
-        assertThat(object, instanceOf(String.class));
-        assertThat((String)object, equalTo("value2"));
-    }
-
-    public void testEvaluateArrayElementObjectWrongPath() throws Exception {
-        String json = "{ \"field1\": { \"array1\" : [ {\"element\": \"value1\"}, {\"element\":\"value2\"} ] } }";
-        JsonPath jsonPath = new JsonPath(json);
-        Object object = jsonPath.evaluate("field1.array2.1.element");
-        assertThat(object, nullValue());
-    }
-
-    @SuppressWarnings("unchecked")
-    public void testEvaluateObjectKeys() throws Exception {
-        String json = "{ \"metadata\": { \"templates\" : {\"template_1\": { \"field\" : \"value\"}, \"template_2\": { \"field\" : \"value\"} } } }";
-        JsonPath jsonPath = new JsonPath(json);
-        Object object = jsonPath.evaluate("metadata.templates");
-        assertThat(object, instanceOf(Map.class));
-        Map map = (Map)object;
-        assertThat(map.size(), equalTo(2));
-        Set strings = map.keySet();
-        assertThat(strings, contains("template_1", "template_2"));
-    }
-
-    @SuppressWarnings("unchecked")
-    public void testEvaluateEmptyPath() throws Exception {
-        String json = "{ \"field1\": { \"array1\" : [ {\"element\": \"value1\"}, {\"element\":\"value2\"} ] } }";
-        JsonPath jsonPath = new JsonPath(json);
-        Object object = jsonPath.evaluate("");
-        assertThat(object, notNullValue());
-        assertThat(object, instanceOf(Map.class));
-        assertThat(((Map)object).containsKey("field1"), equalTo(true));
-    }
-
-    public void testEvaluateStashInPropertyName() throws Exception {
-        String json = "{ \"field1\": { \"elements\" : {\"element1\": \"value1\"}}}";
-        JsonPath jsonPath = new JsonPath(json);
-        try {
-            jsonPath.evaluate("field1.$placeholder.element1");
-            fail("evaluate should have failed due to unresolved placeholder");
-        } catch(IllegalArgumentException e) {
-            assertThat(e.getMessage(), containsString("stashed value not found for key [$placeholder]"));
-        }
-
-        Stash stash = new Stash();
-        stash.stashValue("placeholder", "elements");
-        Object object = jsonPath.evaluate("field1.$placeholder.element1", stash);
-        assertThat(object, notNullValue());
-        assertThat(object.toString(), equalTo("value1"));
-    }
-}
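ObjectPathTests below replaces JsonPathTests: ObjectPath is format-agnostic, so the same path evaluation is exercised against randomly chosen JSON and YAML instead of hand-built JSON strings. The core pattern, sketched here with illustrative field names and values:

    XContentBuilder builder = XContentFactory.jsonBuilder();
    builder.startObject().startObject("field1").field("field2", "value2").endObject().endObject();
    ObjectPath objectPath = ObjectPath.createFromXContent(builder.contentType().xContent(), builder.string());
    Object value = objectPath.evaluate("field1.field2");   // "value2"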
diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/ObjectPathTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/ObjectPathTests.java
new file mode 100644
index 00000000000..1d99a73c767
--- /dev/null
+++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/ObjectPathTests.java
@@ -0,0 +1,265 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.test.rest.test;
+
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.rest.ObjectPath;
+import org.elasticsearch.test.rest.Stash;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
+
+public class ObjectPathTests extends ESTestCase {
+
+    private static XContentBuilder randomXContentBuilder() throws IOException {
+        //only string based formats are supported, no cbor nor smile
+        XContentType xContentType = randomFrom(XContentType.JSON, XContentType.YAML);
+        return XContentBuilder.builder(XContentFactory.xContent(xContentType));
+    }
+
+    public void testEvaluateObjectPathEscape() throws Exception {
+        XContentBuilder xContentBuilder = randomXContentBuilder();
+        xContentBuilder.startObject();
+        xContentBuilder.startObject("field1");
+        xContentBuilder.field("field2.field3", "value2");
+        xContentBuilder.endObject();
+        xContentBuilder.endObject();
+        ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.string());
+        Object object = objectPath.evaluate("field1.field2\\.field3");
+        assertThat(object, instanceOf(String.class));
+        assertThat(object, equalTo("value2"));
+    }
+
+    public void testEvaluateObjectPathWithDots() throws Exception {
+        XContentBuilder xContentBuilder = randomXContentBuilder();
+        xContentBuilder.startObject();
+        xContentBuilder.startObject("field1");
+        xContentBuilder.field("field2", "value2");
+        xContentBuilder.endObject();
+        xContentBuilder.endObject();
+        ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.string());
+        Object object = objectPath.evaluate("field1..field2");
+        assertThat(object, instanceOf(String.class));
+        assertThat(object, equalTo("value2"));
+        object = objectPath.evaluate("field1.field2.");
+        assertThat(object, instanceOf(String.class));
+        assertThat(object, equalTo("value2"));
+        object = objectPath.evaluate("field1.field2");
+        assertThat(object, instanceOf(String.class));
+        assertThat(object, equalTo("value2"));
+    }
+
+    public void testEvaluateInteger() throws Exception {
+        XContentBuilder xContentBuilder = randomXContentBuilder();
+        xContentBuilder.startObject();
+        xContentBuilder.startObject("field1");
+        xContentBuilder.field("field2", 333);
+        xContentBuilder.endObject();
+        xContentBuilder.endObject();
+        ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.string());
+        Object object = objectPath.evaluate("field1.field2");
+        assertThat(object, instanceOf(Integer.class));
+        assertThat(object, equalTo(333));
+    }
+
+    public void testEvaluateDouble() throws Exception {
+        XContentBuilder xContentBuilder = randomXContentBuilder();
+        xContentBuilder.startObject();
+        xContentBuilder.startObject("field1");
+        xContentBuilder.field("field2", 3.55);
+        xContentBuilder.endObject();
+        xContentBuilder.endObject();
+        ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.string());
+        Object object = objectPath.evaluate("field1.field2");
+        assertThat(object, instanceOf(Double.class));
+        assertThat(object, equalTo(3.55));
+    }
+
+    public void testEvaluateArray() throws Exception {
+        XContentBuilder xContentBuilder = randomXContentBuilder();
+        xContentBuilder.startObject();
+        xContentBuilder.startObject("field1");
+        xContentBuilder.array("array1", "value1", "value2");
+        xContentBuilder.endObject();
+        xContentBuilder.endObject();
+        ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.string());
+        Object object = objectPath.evaluate("field1.array1");
+        assertThat(object, instanceOf(List.class));
+        List list = (List) object;
+        assertThat(list.size(), equalTo(2));
+        assertThat(list.get(0), instanceOf(String.class));
+        assertThat(list.get(0), equalTo("value1"));
+        assertThat(list.get(1), instanceOf(String.class));
+        assertThat(list.get(1), equalTo("value2"));
+        object = objectPath.evaluate("field1.array1.1");
+        assertThat(object, instanceOf(String.class));
+        assertThat(object, equalTo("value2"));
+    }
+
+    @SuppressWarnings("unchecked")
+    public void testEvaluateArrayElementObject() throws Exception {
+        XContentBuilder xContentBuilder = randomXContentBuilder();
+        xContentBuilder.startObject();
+        xContentBuilder.startObject("field1");
+        xContentBuilder.startArray("array1");
+        xContentBuilder.startObject();
+        xContentBuilder.field("element", "value1");
+        xContentBuilder.endObject();
+        xContentBuilder.startObject();
+        xContentBuilder.field("element", "value2");
+        xContentBuilder.endObject();
+        xContentBuilder.endArray();
+        xContentBuilder.endObject();
+        xContentBuilder.endObject();
+        ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.string());
+        Object object = objectPath.evaluate("field1.array1.1.element");
+        assertThat(object, instanceOf(String.class));
+        assertThat(object, equalTo("value2"));
+        object = objectPath.evaluate("");
+        assertThat(object, notNullValue());
+        assertThat(object, instanceOf(Map.class));
+        assertThat(((Map)object).containsKey("field1"), equalTo(true));
+        object = objectPath.evaluate("field1.array2.1.element");
+        assertThat(object, nullValue());
+    }
+
+    @SuppressWarnings("unchecked")
+    public void testEvaluateObjectKeys() throws Exception {
+        XContentBuilder xContentBuilder = randomXContentBuilder();
+        xContentBuilder.startObject();
+        xContentBuilder.startObject("metadata");
+        xContentBuilder.startObject("templates");
+        xContentBuilder.startObject("template_1");
+        xContentBuilder.field("field", "value");
+        xContentBuilder.endObject();
+        xContentBuilder.startObject("template_2");
+        xContentBuilder.field("field", "value");
+        xContentBuilder.endObject();
+        xContentBuilder.endObject();
+        xContentBuilder.endObject();
+        xContentBuilder.endObject();
+        ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.string());
+        Object object = objectPath.evaluate("metadata.templates");
+        assertThat(object, instanceOf(Map.class));
+        Map map = (Map)object;
+        assertThat(map.size(), equalTo(2));
+        Set strings = map.keySet();
+        assertThat(strings, contains("template_1", "template_2"));
+    }
+
+    public void testEvaluateStashInPropertyName() throws Exception {
+        XContentBuilder xContentBuilder = randomXContentBuilder();
+        xContentBuilder.startObject();
+        xContentBuilder.startObject("field1");
+        xContentBuilder.startObject("elements");
+        xContentBuilder.field("element1", "value1");
+        xContentBuilder.endObject();
+        xContentBuilder.endObject();
+        xContentBuilder.endObject();
+        ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.string());
+        try {
+            objectPath.evaluate("field1.$placeholder.element1");
+            fail("evaluate should have failed due to unresolved placeholder");
+        } catch(IllegalArgumentException e) {
+            assertThat(e.getMessage(), containsString("stashed value not found for key [placeholder]"));
+        }
+
+        // Stashed value is whole property name
+        Stash stash = new Stash();
+        stash.stashValue("placeholder", "elements");
+        Object object = objectPath.evaluate("field1.$placeholder.element1", stash);
+        assertThat(object, notNullValue());
+        assertThat(object.toString(), equalTo("value1"));
+
+        // Stash key has dots
+        Map<String, Object> stashedObject = new HashMap<>();
+        stashedObject.put("subobject", "elements");
+        stash.stashValue("object", stashedObject);
+        object = objectPath.evaluate("field1.$object\\.subobject.element1", stash);
+        assertThat(object, notNullValue());
+        assertThat(object.toString(), equalTo("value1"));
+
+        // Stashed value is part of property name
+        stash.stashValue("placeholder", "ele");
+        object = objectPath.evaluate("field1.${placeholder}ments.element1", stash);
+        assertThat(object, notNullValue());
+        assertThat(object.toString(), equalTo("value1"));
+
+        // Stashed value is inside of property name
+        stash.stashValue("placeholder", "le");
+        object = objectPath.evaluate("field1.e${placeholder}ments.element1", stash);
+        assertThat(object, notNullValue());
+        assertThat(object.toString(), equalTo("value1"));
+
+        // Multiple stashed values in property name
+        stash.stashValue("placeholder", "le");
+        stash.stashValue("placeholder2", "nts");
+        object = objectPath.evaluate("field1.e${placeholder}me${placeholder2}.element1", stash);
+        assertThat(object, notNullValue());
+        assertThat(object.toString(), equalTo("value1"));
+
+        // Stashed value is part of property name and has dots
+        stashedObject.put("subobject", "ele");
+        stash.stashValue("object", stashedObject);
+        object = objectPath.evaluate("field1.${object\\.subobject}ments.element1", stash);
+        assertThat(object, notNullValue());
+        assertThat(object.toString(), equalTo("value1"));
+    }
+
+    @SuppressWarnings("unchecked")
+    public void testEvaluateArrayAsRoot() throws Exception {
+        XContentBuilder xContentBuilder = randomXContentBuilder();
+        xContentBuilder.startArray();
+        xContentBuilder.startObject();
+        xContentBuilder.field("alias", "test_alias1");
+        xContentBuilder.field("index", "test1");
+        xContentBuilder.endObject();
+        xContentBuilder.startObject();
+        xContentBuilder.field("alias", "test_alias2");
+        xContentBuilder.field("index", "test2");
+        xContentBuilder.endObject();
+        xContentBuilder.endArray();
+        ObjectPath objectPath = ObjectPath.createFromXContent(XContentFactory.xContent(XContentType.YAML), xContentBuilder.string());
+        Object object = objectPath.evaluate("");
+        assertThat(object, notNullValue());
+        assertThat(object, instanceOf(List.class));
+        assertThat(((List)object).size(), equalTo(2));
+        object = objectPath.evaluate("0");
+        assertThat(object, notNullValue());
+        assertThat(object, instanceOf(Map.class));
+        assertThat(((Map)object).get("alias"), equalTo("test_alias1"));
+        object = objectPath.evaluate("1.index");
+        assertThat(object, notNullValue());
+        assertThat(object, instanceOf(String.class));
+        assertThat(object, equalTo("test2"));
+    }
+}
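Beyond whole-segment `$placeholder` keys, the tests above exercise the `${...}` form, which can appear anywhere inside a path segment and may itself contain escaped dots. Given an ObjectPath built as in the earlier sketch, and with invented stash contents:

    Stash stash = new Stash();
    stash.stashValue("prefix", "ele");
    // "${prefix}" is expanded inline, so the segment resolves to "elements":
    Object value = objectPath.evaluate("field1.${prefix}ments.element1", stash);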
b/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java index 298f230d64a..6b5cc3defb7 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java @@ -54,13 +54,30 @@ public class RestTestParserTests extends ESTestCase { parser.close(); } - public void testParseTestSetupAndSections() throws Exception { - parser = YamlXContent.yamlXContent.createParser( + public void testParseTestSetupTeardownAndSections() throws Exception { + final boolean includeSetup = randomBoolean(); + final boolean includeTeardown = randomBoolean(); + StringBuilder testSpecBuilder = new StringBuilder(); + if (includeSetup) { + testSpecBuilder + .append("---\n" + "setup:\n" + " - do:\n" + " indices.create:\n" + " index: test_index\n" + - "\n" + + "\n"); + } + if (includeTeardown) { + testSpecBuilder + .append("---\n" + + "teardown:\n" + + " - do:\n" + + " indices.delete:\n" + + " index: test_index\n" + + "\n"); + } + parser = YamlXContent.yamlXContent.createParser( + testSpecBuilder.toString() + "---\n" + "\"Get index mapping\":\n" + " - do:\n" + @@ -92,12 +109,30 @@ public class RestTestParserTests extends ESTestCase { assertThat(restTestSuite, notNullValue()); assertThat(restTestSuite.getName(), equalTo("suite")); assertThat(restTestSuite.getSetupSection(), notNullValue()); - assertThat(restTestSuite.getSetupSection().getSkipSection().isEmpty(), equalTo(true)); - - assertThat(restTestSuite.getSetupSection().getDoSections().size(), equalTo(1)); - assertThat(restTestSuite.getSetupSection().getDoSections().get(0).getApiCallSection().getApi(), equalTo("indices.create")); - assertThat(restTestSuite.getSetupSection().getDoSections().get(0).getApiCallSection().getParams().size(), equalTo(1)); - assertThat(restTestSuite.getSetupSection().getDoSections().get(0).getApiCallSection().getParams().get("index"), equalTo("test_index")); + if (includeSetup) { + assertThat(restTestSuite.getSetupSection().isEmpty(), equalTo(false)); + assertThat(restTestSuite.getSetupSection().getSkipSection().isEmpty(), equalTo(true)); + assertThat(restTestSuite.getSetupSection().getDoSections().size(), equalTo(1)); + assertThat(restTestSuite.getSetupSection().getDoSections().get(0).getApiCallSection().getApi(), equalTo("indices.create")); + assertThat(restTestSuite.getSetupSection().getDoSections().get(0).getApiCallSection().getParams().size(), equalTo(1)); + assertThat(restTestSuite.getSetupSection().getDoSections().get(0).getApiCallSection().getParams().get("index"), + equalTo("test_index")); + } else { + assertThat(restTestSuite.getSetupSection().isEmpty(), equalTo(true)); + } + + assertThat(restTestSuite.getTeardownSection(), notNullValue()); + if (includeTeardown) { + assertThat(restTestSuite.getTeardownSection().isEmpty(), equalTo(false)); + assertThat(restTestSuite.getTeardownSection().getSkipSection().isEmpty(), equalTo(true)); + assertThat(restTestSuite.getTeardownSection().getDoSections().size(), equalTo(1)); + assertThat(restTestSuite.getTeardownSection().getDoSections().get(0).getApiCallSection().getApi(), equalTo("indices.delete")); + assertThat(restTestSuite.getTeardownSection().getDoSections().get(0).getApiCallSection().getParams().size(), equalTo(1)); + assertThat(restTestSuite.getTeardownSection().getDoSections().get(0).getApiCallSection().getParams().get("index"), + equalTo("test_index")); + } else { + assertThat(restTestSuite.getTeardownSection().isEmpty(), 
diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/StashTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/StashTests.java
new file mode 100644
index 00000000000..7d0c0598f09
--- /dev/null
+++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/StashTests.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.test.rest.test;
+
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.rest.Stash;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import static java.util.Collections.singletonMap;
+
+public class StashTests extends ESTestCase {
+    public void testReplaceStashedValuesEmbeddedStashKey() throws IOException {
+        Stash stash = new Stash();
+        stash.stashValue("stashed", "bar");
+
+        Map<String, Object> expected = new HashMap<>();
+        expected.put("key", singletonMap("a", "foobar"));
+        Map<String, Object> map = new HashMap<>();
+        Map<String, Object> map2 = new HashMap<>();
+        map2.put("a", "foo${stashed}");
+        map.put("key", map2);
+
+        Map<String, Object> actual = stash.replaceStashedValues(map);
+        assertEquals(expected, actual);
+    }
+}
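The test above relies on replaceStashedValues walking nested maps and splicing stashed values into string leaves. A hand-rolled approximation of that walk (class and method names are illustrative, and the real implementation also handles lists and non-string leaves):

import java.util.HashMap;
import java.util.Map;

final class StashReplaceSketch {
    static Map<String, Object> replace(Map<String, Object> source, Map<String, String> stash) {
        Map<String, Object> copy = new HashMap<>();
        for (Map.Entry<String, Object> entry : source.entrySet()) {
            Object value = entry.getValue();
            if (value instanceof Map) {
                @SuppressWarnings("unchecked")
                Map<String, Object> nested = (Map<String, Object>) value;
                value = replace(nested, stash); // recurse into sub-objects
            } else if (value instanceof String) {
                String s = (String) value;
                for (Map.Entry<String, String> stashed : stash.entrySet()) {
                    s = s.replace("${" + stashed.getKey() + "}", stashed.getValue());
                }
                value = s; // "foo${stashed}" -> "foobar"
            }
            copy.put(entry.getKey(), value);
        }
        return copy;
    }
}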
diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/TeardownSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/TeardownSectionParserTests.java
new file mode 100644
index 00000000000..eeccea5f5e5
--- /dev/null
+++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/TeardownSectionParserTests.java
@@ -0,0 +1,93 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.test.rest.test;
+
+import org.elasticsearch.Version;
+import org.elasticsearch.common.xcontent.yaml.YamlXContent;
+import org.elasticsearch.test.rest.parser.RestTestSuiteParseContext;
+import org.elasticsearch.test.rest.parser.TeardownSectionParser;
+import org.elasticsearch.test.rest.section.TeardownSection;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.notNullValue;
+
+/**
+ * Unit tests for the teardown section parser
+ */
+public class TeardownSectionParserTests extends AbstractParserTestCase {
+
+    public void testParseTeardownSection() throws Exception {
+        parser = YamlXContent.yamlXContent.createParser(
+            "  - do:\n" +
+            "      delete:\n" +
+            "        index: foo\n" +
+            "        type: doc\n" +
+            "        id: 1\n" +
+            "        ignore: 404\n" +
+            "  - do:\n" +
+            "      delete2:\n" +
+            "        index: foo\n" +
+            "        type: doc\n" +
+            "        id: 1\n" +
+            "        ignore: 404"
+        );
+
+        TeardownSectionParser teardownSectionParser = new TeardownSectionParser();
+        TeardownSection section = teardownSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser));
+
+        assertThat(section, notNullValue());
+        assertThat(section.getSkipSection().isEmpty(), equalTo(true));
+        assertThat(section.getDoSections().size(), equalTo(2));
+        assertThat(section.getDoSections().get(0).getApiCallSection().getApi(), equalTo("delete"));
+        assertThat(section.getDoSections().get(1).getApiCallSection().getApi(), equalTo("delete2"));
+    }
+
+    public void testParseWithSkip() throws Exception {
+        parser = YamlXContent.yamlXContent.createParser(
+            "  - skip:\n" +
+            "      version: \"2.0.0 - 2.3.0\"\n" +
+            "      reason: \"there is a reason\"\n" +
+            "  - do:\n" +
+            "      delete:\n" +
+            "        index: foo\n" +
+            "        type: doc\n" +
+            "        id: 1\n" +
+            "        ignore: 404\n" +
+            "  - do:\n" +
+            "      delete2:\n" +
+            "        index: foo\n" +
+            "        type: doc\n" +
+            "        id: 1\n" +
+            "        ignore: 404"
+        );
+
+        TeardownSectionParser teardownSectionParser = new TeardownSectionParser();
+        TeardownSection section = teardownSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser));
+
+        assertThat(section, notNullValue());
+        assertThat(section.getSkipSection().isEmpty(), equalTo(false));
+        assertThat(section.getSkipSection().getLowerVersion(), equalTo(Version.V_2_0_0));
+        assertThat(section.getSkipSection().getUpperVersion(), equalTo(Version.V_2_3_0));
+        assertThat(section.getSkipSection().getReason(), equalTo("there is a reason"));
+        assertThat(section.getDoSections().size(), equalTo(2));
+        assertThat(section.getDoSections().get(0).getApiCallSection().getApi(), equalTo("delete"));
+        assertThat(section.getDoSections().get(1).getApiCallSection().getApi(), equalTo("delete2"));
+    }
+}
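testParseWithSkip pins down how a "2.0.0 - 2.3.0" range is split into a lower and upper bound. The gate itself reduces to an inclusive range check; a toy version of it, with integer-encoded versions as an assumption of the sketch (the framework compares real Version objects):

final class SkipRangeSketch {
    // skip when the current version falls inside the inclusive [lower, upper] range
    static boolean shouldSkip(int current, int lower, int upper) {
        return current >= lower && current <= upper;
    }

    public static void main(String[] args) {
        // encode x.y.z as x * 10000 + y * 100 + z: 2.1.0 lies inside 2.0.0 - 2.3.0
        System.out.println(shouldSkip(2_01_00, 2_00_00, 2_03_00)); // true
    }
}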
diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java
index ae774220ccf..5bf11e4dc98 100644
--- a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java
+++ b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java
@@ -22,23 +22,39 @@ import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.discovery.DiscoverySettings;
+import org.elasticsearch.env.NodeEnvironment;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.InternalTestCluster;
 import org.elasticsearch.test.NodeConfigurationSource;
 import org.elasticsearch.transport.TransportSettings;
 
+import java.io.IOException;
+import java.nio.file.Files;
 import java.nio.file.Path;
+import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 import java.util.Random;
 import java.util.Set;
 import java.util.function.Function;
+import java.util.stream.Collectors;
 
+import static org.elasticsearch.cluster.node.DiscoveryNode.Role.DATA;
+import static org.elasticsearch.cluster.node.DiscoveryNode.Role.INGEST;
+import static org.elasticsearch.cluster.node.DiscoveryNode.Role.MASTER;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileExists;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileNotExists;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasEntry;
+import static org.hamcrest.Matchers.not;
 
 /**
  * Basic test that ensures that the internal cluster reproduces the same
@@ -74,7 +90,7 @@ public class InternalTestClusterTests extends ESTestCase {
      * a set of settings that are expected to have different values between clusters, even if they have been initialized with the same
      * base settings.
     */
-    final static Set<String> clusterUniqueSettings = new HashSet<>();
+    static final Set<String> clusterUniqueSettings = new HashSet<>();
 
     static {
         clusterUniqueSettings.add(ClusterName.CLUSTER_NAME_SETTING.getKey());
@@ -153,4 +169,139 @@ public class InternalTestClusterTests extends ESTestCase {
             IOUtils.close(cluster0, cluster1);
         }
     }
+
+    public void testDataFolderAssignmentAndCleaning() throws IOException, InterruptedException {
+        long clusterSeed = randomLong();
+        boolean masterNodes = randomBoolean();
+        // we need one stable node
+        int minNumDataNodes = 2;
+        int maxNumDataNodes = 2;
+        final String clusterName1 = "shared1";
+        NodeConfigurationSource nodeConfigurationSource = NodeConfigurationSource.EMPTY;
+        int numClientNodes = randomIntBetween(0, 2);
+        boolean enableHttpPipelining = randomBoolean();
+        String nodePrefix = "test";
+        Path baseDir = createTempDir();
+        InternalTestCluster cluster = new InternalTestCluster("local", clusterSeed, baseDir, masterNodes,
+            minNumDataNodes, maxNumDataNodes, clusterName1, nodeConfigurationSource, numClientNodes,
+            enableHttpPipelining, nodePrefix, Collections.emptyList(), Function.identity());
+        try {
+            cluster.beforeTest(random(), 0.0);
+            final Map<String, Path[]> shardNodePaths = new HashMap<>();
+            for (String name: cluster.getNodeNames()) {
+                shardNodePaths.put(name, getNodePaths(cluster, name));
+            }
+            String poorNode = randomFrom(cluster.getNodeNames());
+            Path dataPath = getNodePaths(cluster, poorNode)[0];
+            final Path testMarker = dataPath.resolve("testMarker");
+            Files.createDirectories(testMarker);
+            cluster.stopRandomNode(InternalTestCluster.nameFilter(poorNode));
+            assertFileExists(testMarker); // stopping a node half way shouldn't clean data
+
+            final String stableNode = randomFrom(cluster.getNodeNames());
+            final Path stableDataPath = getNodePaths(cluster, stableNode)[0];
+            final Path stableTestMarker = stableDataPath.resolve("stableTestMarker");
+            assertThat(stableDataPath, not(dataPath));
+            Files.createDirectories(stableTestMarker);
+
+            final String newNode1 = cluster.startNode();
+            assertThat(getNodePaths(cluster, newNode1)[0], equalTo(dataPath));
+            assertFileExists(testMarker); // starting a node should re-use data folders and not clean them
+
+            final String newNode2 = cluster.startNode();
+            final Path newDataPath = getNodePaths(cluster, newNode2)[0];
+            final Path newTestMarker = newDataPath.resolve("newTestMarker");
+            assertThat(newDataPath, not(dataPath));
+            Files.createDirectories(newTestMarker);
+            cluster.beforeTest(random(), 0.0);
+            assertFileNotExists(newTestMarker); // the cluster should be reset for a new test, cleaning up the extra path we made
+            assertFileNotExists(testMarker); // a new unknown node used this path, it should be cleaned
+            assertFileExists(stableTestMarker); // but leaving the structure of existing, reused nodes
+            for (String name: cluster.getNodeNames()) {
+                assertThat("data paths for " + name + " changed", getNodePaths(cluster, name), equalTo(shardNodePaths.get(name)));
+            }
+
+            cluster.beforeTest(random(), 0.0);
+            assertFileExists(stableTestMarker); // but leaving the structure of existing, reused nodes
+            for (String name: cluster.getNodeNames()) {
+                assertThat("data paths for " + name + " changed", getNodePaths(cluster, name),
+                    equalTo(shardNodePaths.get(name)));
+            }
+
+        } finally {
+            cluster.close();
+        }
+    }
+
+    private Path[] getNodePaths(InternalTestCluster cluster, String name) {
+        final NodeEnvironment nodeEnvironment = cluster.getInstance(NodeEnvironment.class, name);
+        if (nodeEnvironment.hasNodeFile()) {
+            return nodeEnvironment.nodeDataPaths();
+        } else {
+            return new Path[0];
+        }
+    }
+
+    public void testDifferentRolesMaintainPathOnRestart() throws Exception {
+        final Path baseDir = createTempDir();
+        InternalTestCluster cluster = new InternalTestCluster("local", randomLong(), baseDir, true, 0, 0, "test",
+            new NodeConfigurationSource() {
+                @Override
+                public Settings nodeSettings(int nodeOrdinal) {
+                    return Settings.builder().put(DiscoverySettings.INITIAL_STATE_TIMEOUT_SETTING.getKey(), 0).build();
+                }
+
+                @Override
+                public Settings transportClientSettings() {
+                    return Settings.EMPTY;
+                }
+            }, 0, randomBoolean(), "", Collections.emptyList(), Function.identity());
+        cluster.beforeTest(random(), 0.0);
+        try {
+            Map<DiscoveryNode.Role, Set<String>> pathsPerRole = new HashMap<>();
+            for (int i = 0; i < 5; i++) {
+                final DiscoveryNode.Role role = randomFrom(MASTER, DiscoveryNode.Role.DATA, DiscoveryNode.Role.INGEST);
+                final String node;
+                switch (role) {
+                    case MASTER:
+                        node = cluster.startMasterOnlyNode(Settings.EMPTY);
+                        break;
+                    case DATA:
+                        node = cluster.startDataOnlyNode(Settings.EMPTY);
+                        break;
+                    case INGEST:
+                        node = cluster.startCoordinatingOnlyNode(Settings.EMPTY);
+                        break;
+                    default:
+                        throw new IllegalStateException("get your story straight");
+                }
+                Set<String> rolePaths = pathsPerRole.computeIfAbsent(role, k -> new HashSet<>());
+                for (Path path : getNodePaths(cluster, node)) {
+                    assertTrue(rolePaths.add(path.toString()));
+                }
+            }
+            cluster.fullRestart();
+
+            Map<DiscoveryNode.Role, Set<String>> result = new HashMap<>();
+            for (String name : cluster.getNodeNames()) {
+                DiscoveryNode node = cluster.getInstance(ClusterService.class, name).localNode();
+                List<String> paths = Arrays.stream(getNodePaths(cluster, name)).map(Path::toString).collect(Collectors.toList());
+                if (node.isMasterNode()) {
+                    result.computeIfAbsent(MASTER, k -> new HashSet<>()).addAll(paths);
+                } else if (node.isDataNode()) {
+                    result.computeIfAbsent(DATA, k -> new HashSet<>()).addAll(paths);
+                } else {
+                    result.computeIfAbsent(INGEST, k -> new HashSet<>()).addAll(paths);
+                }
+            }
+
+            assertThat(result.size(), equalTo(pathsPerRole.size()));
+            for (DiscoveryNode.Role role : result.keySet()) {
+                assertThat("paths are not the same for " + role, result.get(role), equalTo(pathsPerRole.get(role)));
+            }
+        } finally {
+            cluster.close();
+        }
+
+    }
 }
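Both cluster tests above assert the same invariant from different angles: a node that comes back under a known name keeps its data folder, while a fresh node gets a new one and stale folders are cleaned on the next beforeTest. A toy registry showing just the reuse part (the class is illustrative; InternalTestCluster's real bookkeeping also covers roles and cleanup):

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;

final class DataPathRegistrySketch {
    private final Map<String, Path> assigned = new HashMap<>();
    private final Path baseDir;
    private int counter;

    DataPathRegistrySketch(Path baseDir) {
        this.baseDir = baseDir;
    }

    // a known node name maps back to its old folder; unknown names get a fresh one
    Path pathFor(String nodeName) {
        return assigned.computeIfAbsent(nodeName, name -> baseDir.resolve("d" + counter++));
    }

    public static void main(String[] args) {
        DataPathRegistrySketch registry = new DataPathRegistrySketch(Paths.get("/tmp/cluster"));
        Path first = registry.pathFor("node_t0");
        registry.pathFor("node_t1"); // gets a different folder
        System.out.println(first.equals(registry.pathFor("node_t0"))); // true: reused after "restart"
    }
}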
diff --git a/test/logger-usage/src/main/java/org/elasticsearch/test/loggerusage/ESLoggerUsageChecker.java b/test/logger-usage/src/main/java/org/elasticsearch/test/loggerusage/ESLoggerUsageChecker.java
index 25d4052c162..041d21cc762 100644
--- a/test/logger-usage/src/main/java/org/elasticsearch/test/loggerusage/ESLoggerUsageChecker.java
+++ b/test/logger-usage/src/main/java/org/elasticsearch/test/loggerusage/ESLoggerUsageChecker.java
@@ -82,7 +82,7 @@ public class ESLoggerUsageChecker {
         Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
             @Override
             public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
-                if (Files.isRegularFile(file) && file.endsWith(".class")) {
+                if (Files.isRegularFile(file) && file.getFileName().toString().endsWith(".class")) {
                     try (InputStream in = Files.newInputStream(file)) {
                         ESLoggerUsageChecker.check(wrongUsageCallback, in);
                     }
@@ -248,19 +248,6 @@ public class ESLoggerUsageChecker {
                 if (LOGGER_METHODS.contains(methodInsn.name) == false) {
                     continue;
                 }
-                Type[] argumentTypes = Type.getArgumentTypes(methodInsn.desc);
-                BasicValue logMessageLengthObject = getStackValue(stringFrames[i], argumentTypes.length - 1); // first argument
-                if (logMessageLengthObject instanceof PlaceHolderStringBasicValue == false) {
-                    wrongUsageCallback.accept(new WrongLoggerUsage(className, methodNode.name, methodInsn.name, lineNumber,
-                        "First argument must be a string constant so that we can statically ensure proper place holder usage"));
-                    continue;
-                }
-                PlaceHolderStringBasicValue logMessageLength = (PlaceHolderStringBasicValue) logMessageLengthObject;
-                if (logMessageLength.minValue != logMessageLength.maxValue) {
-                    wrongUsageCallback.accept(new WrongLoggerUsage(className, methodNode.name, methodInsn.name, lineNumber,
-                        "Multiple log messages with conflicting number of place holders"));
-                    continue;
-                }
                 BasicValue varArgsSizeObject = getStackValue(arraySizeFrames[i], 0); // last argument
                 if (varArgsSizeObject instanceof ArraySizeBasicValue == false) {
                     wrongUsageCallback.accept(new WrongLoggerUsage(className, methodNode.name, methodInsn.name, lineNumber,
@@ -268,6 +255,24 @@ public class ESLoggerUsageChecker {
                     continue;
                 }
                 ArraySizeBasicValue varArgsSize = (ArraySizeBasicValue) varArgsSizeObject;
+                Type[] argumentTypes = Type.getArgumentTypes(methodInsn.desc);
+                BasicValue logMessageLengthObject = getStackValue(stringFrames[i], argumentTypes.length - 1); // first argument
+                if (logMessageLengthObject instanceof PlaceHolderStringBasicValue == false) {
+                    if (varArgsSize.minValue > 0) {
+                        wrongUsageCallback.accept(new WrongLoggerUsage(className, methodNode.name, methodInsn.name, lineNumber,
+                            "First argument must be a string constant so that we can statically ensure proper place holder usage"));
+                        continue;
+                    } else {
+                        // don't check logger usage for logger.warn(someObject) as someObject will be fully logged
+                        continue;
+                    }
+                }
+                PlaceHolderStringBasicValue logMessageLength = (PlaceHolderStringBasicValue) logMessageLengthObject;
+                if (logMessageLength.minValue != logMessageLength.maxValue) {
+                    wrongUsageCallback.accept(new WrongLoggerUsage(className, methodNode.name, methodInsn.name, lineNumber,
+                        "Multiple log messages with conflicting number of place holders"));
+                    continue;
+                }
                 if (varArgsSize.minValue != varArgsSize.maxValue) {
                     wrongUsageCallback.accept(new WrongLoggerUsage(className, methodNode.name, methodInsn.name, lineNumber,
                         "Multiple parameter arrays with conflicting sizes"));
diff --git a/test/logger-usage/src/test/java/org/elasticsearch/test/loggerusage/ESLoggerUsageTests.java b/test/logger-usage/src/test/java/org/elasticsearch/test/loggerusage/ESLoggerUsageTests.java
index ab07ecbf45e..73449f4351c 100644
--- a/test/logger-usage/src/test/java/org/elasticsearch/test/loggerusage/ESLoggerUsageTests.java
+++ b/test/logger-usage/src/test/java/org/elasticsearch/test/loggerusage/ESLoggerUsageTests.java
@@ -129,8 +129,12 @@ public class ESLoggerUsageTests extends ESTestCase {
         logger.info("Hello {}, {}", "world", 42, new Exception());
     }
 
-    public void checkFailNonConstantMessage(boolean b) {
-        logger.info(Boolean.toString(b));
+    public void checkNonConstantMessageWithZeroArguments(boolean b) {
+        logger.info(Boolean.toString(b), new Exception());
+    }
+
+    public void checkFailNonConstantMessageWithArguments(boolean b) {
+        logger.info(Boolean.toString(b), new Exception(), 42);
     }
 
     public void checkComplexUsage(boolean b) {